diff --git a/.github/workflows/test_integration_spcs.yaml b/.github/workflows/test_integration_spcs.yaml
index 00973e2821..1f60f5b6ac 100644
--- a/.github/workflows/test_integration_spcs.yaml
+++ b/.github/workflows/test_integration_spcs.yaml
@@ -1,6 +1,9 @@
 name: SPCS Integration testing
 on:
+  pull_request:
+    branches:
+      - release*
   push:
     tags:
       - "v*" # Push events to matching v*, i.e. v1.0, v20.15.10
diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md
index 0641ea823a..3a7e49f969 100644
--- a/RELEASE-NOTES.md
+++ b/RELEASE-NOTES.md
@@ -18,6 +18,35 @@
 ## Deprecations

+## New additions
+
+## Fixes and improvements
+
+
+# v3.7.2
+
+## Deprecations
+
+## New additions
+
+## Fixes and improvements
+* Fix error appearing in help messages after the click BCR update.
+
+
+# v3.7.1
+
+## Deprecations
+
+## New additions
+
+## Fixes and improvements
+* Fix certificate connection issues.
+* Fix `snow spcs image-registry login` slow query problem.
+
+# v3.7.0
+
+## Deprecations
+
 ## New additions
 * Added `--prune` flag to `deploy` commands, which removes files that exist in the stage, but not in the local filesystem.
@@ -25,6 +54,7 @@
 * Added `snow helper check-snowsql-env-vars` which reports environment variables from SnowSQL with replacements in CLI.

 ## Fixes and improvements
+* Updated macOS postinstall script to update PATH if `snow` is not already available.

 # v3.6.0
diff --git a/pyproject.toml b/pyproject.toml
index ad9868837d..b2ea75954b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,23 +25,24 @@ requires-python = ">=3.10"
 description = "Snowflake CLI"
 readme = "README.md"
 dependencies = [
+  "click==8.1.8",
   "GitPython==3.1.44",
   "jinja2==3.1.6",
   "packaging",
   "pip",
   "pluggy==1.5.0",
-  "pydantic==2.11.2",
+  "pydantic==2.11.3",
   "PyYAML==6.0.2",
   "requests==2.32.3",
   "requirements-parser==0.11.0",
   "rich==14.0.0",
   "setuptools==78.1.0",
-  "snowflake-connector-python[secure-local-storage]==3.14.0",
+  "snowflake-connector-python[secure-local-storage]==3.15.0",
   'snowflake-snowpark-python>=1.15.0,<1.26.0;python_version < "3.12"',
-  'snowflake.core==1.2.0; python_version < "3.12"',
+  'snowflake.core==1.3.0; python_version < "3.12"',
   "tomlkit==0.13.2",
   "typer==0.15.2",
-  "urllib3>=1.24.3,<2.4",
+  "urllib3>=1.24.3,<2.5",
 ]
 classifiers = [
   "Development Status :: 5 - Production/Stable",
@@ -61,7 +62,7 @@ development = [
   "pre-commit>=3.5.0",
   "pytest==8.3.5",
   "pytest-randomly==3.16.0",
-  "pytest-httpserver==1.1.2",
+  "pytest-httpserver==1.1.3",
   "syrupy==4.9.1",
   "factory-boy==3.3.3",
   "Faker==37.1.0",
@@ -108,7 +109,6 @@ features = ["development", "packaging"]
 build-isolated-binary = ["python scripts/packaging/build_isolated_binary_with_hatch.py"]
 build-binaries = ["./scripts/packaging/build_binaries.sh"]
 build-packages = ["./scripts/packaging/build_packages.sh"]
-package-darwin-binaries = ["./scripts/packaging/build_darwin_package.sh"]
 build-all = [
   "./scripts/packaging/build_binaries.sh",
   "./scripts/packaging/build_packages.sh",
diff --git a/scripts/packaging/build_darwin_package.sh b/scripts/packaging/build_darwin_package.sh
index b4cf4c9c1e..cbcbe3a81a 100755
--- a/scripts/packaging/build_darwin_package.sh
+++ b/scripts/packaging/build_darwin_package.sh
@@ -1,19 +1,44 @@
 #!/usr/bin/env bash
 set -xeuo pipefail

-git config --global --add safe.directory /snowflake-cli
-brew install -q tree
-
-ROOT_DIR=$(git rev-parse --show-toplevel)
-PACKAGING_DIR=$ROOT_DIR/scripts/packaging
-
 SYSTEM=$(uname -s | tr '[:upper:]' '[:lower:]')
 MACHINE=$(uname -m | tr '[:upper:]' '[:lower:]')
 PLATFORM="${SYSTEM}-${MACHINE}"

+echo "--- creating virtualenv ---"
+python3.11 -m venv venv
+. venv/bin/activate
+python --version
+
+echo "--- installing dependencies ---"
+pip install hatch
+
+# install cargo
+if [[ ${MACHINE} == "arm64" ]]; then
+  echo "installing cargo on arm64"
+  curl https://sh.rustup.rs -sSf | bash -s -- -y
+elif [[ ${MACHINE} == "x86_64" ]]; then
+  echo "installing cargo on x86_64"
+  curl https://sh.rustup.rs -sSf | bash -s -- -y --no-modify-path
+  source $HOME/.cargo/env
+else
+  echo "Unsupported machine: ${MACHINE}"
+  exit 1
+fi
+rustup default stable
+
+
+echo "--- setup variables ---"
+BRANCH=${branch}
+REVISION=$(git rev-parse ${svnRevision})
 CLI_VERSION=$(hatch version)
+STAGE_URL="s3://sfc-eng-jenkins/repository/snowflake-cli/${releaseType}/${SYSTEM}_${MACHINE}/${REVISION}/"
+
+ROOT_DIR=$(git rev-parse --show-toplevel)
+PACKAGING_DIR=$ROOT_DIR/scripts/packaging
 DIST_DIR=$ROOT_DIR/dist
+
 BINARY_NAME="snow-${CLI_VERSION}"
 APP_NAME="SnowflakeCLI.app"
 APP_DIR=$DIST_DIR/app
@@ -21,6 +46,7 @@ APP_SCRIPTS=$APP_DIR/scripts
 CODESIGN_IDENTITY="Developer ID Application: Snowflake Computing INC. (W4NT6CRQ7U)"
 PRODUCTSIGN_IDENTITY="Developer ID Installer: Snowflake Computing INC. (W4NT6CRQ7U)"
+

 loginfo() {
   logger -s -p INFO -- $1
 }
@@ -29,26 +55,6 @@ clean_build_workspace() {
   rm -rf $DIST_DIR || true
 }

-install_cargo() {
-  curl https://sh.rustup.rs -sSf > rustup-init.sh
-
-  if [[ ${MACHINE} == "arm64" ]]; then
-    sudo bash rustup-init.sh -y
-    . $HOME/.cargo/env
-  elif [[ ${MACHINE} == "x86_64" ]]; then
-    export CARGO_HOME="$HOME/.cargo"
-    export RUSTUP_HOME="$HOME/.rustup"
-    bash -s rustup-init.sh -y
-    . $HOME/.cargo/env
-    rustup default stable
-  else
-    echo "Unsupported machine: ${MACHINE}"
-    exit 1
-  fi
-
-  rm rustup-init.sh
-}
-
 create_app_template() {
   rm -r ${APP_DIR}/${APP_NAME} || true
   mkdir -p ${APP_DIR}/${APP_NAME}/Contents/MacOS
@@ -61,9 +67,9 @@ loginfo "---------------------------------"
 security find-identity -v -p codesigning
 loginfo "---------------------------------"

-clean_build_workspace
-install_cargo
+echo "--- build binary ---"
+clean_build_workspace
 hatch -e packaging run build-isolated-binary
 create_app_template
 mv $DIST_DIR/binary/${BINARY_NAME} ${APP_DIR}/${APP_NAME}/Contents/MacOS/snow
@@ -118,7 +124,6 @@ prepare_postinstall_script() {

 prepare_postinstall_script
 ls -l $DIST_DIR
-tree -d $DIST_DIR

 chmod +x $APP_SCRIPTS/postinstall

@@ -209,3 +214,9 @@ validate_installation() {
 }

 validate_installation $DIST_DIR/snowflake-cli-${CLI_VERSION}-${SYSTEM}-${MACHINE}.pkg
+
+echo "--- Upload artifacts to AWS ---"
+ls -la ./dist/
+echo "${STAGE_URL}"
+command -v aws
+aws s3 cp ./dist/ ${STAGE_URL} --recursive --exclude "*" --include="snowflake-cli-${CLI_VERSION}*.pkg"
diff --git a/scripts/packaging/macos/postinstall b/scripts/packaging/macos/postinstall
index 2f5b70b198..b42938bda4 100755
--- a/scripts/packaging/macos/postinstall
+++ b/scripts/packaging/macos/postinstall
@@ -3,6 +3,7 @@
 # $2 is the install location
 #
 SNOWFLAKE_CLI_COMMENT="# added by Snowflake SnowflakeCLI installer v1.0"
+RC_FILES=(~/.zprofile ~/.zshrc ~/.profile ~/.bash_profile ~/.bashrc)

 function add_dest_path_to_profile() {
   local dest=$1
@@ -18,19 +19,21 @@ export PATH=$dest:\$PATH" >>$profile
 echo "[DEBUG] Parameters: $1 $2"

 SNOWFLAKE_CLI_DEST=$2/SnowflakeCLI.app/Contents/MacOS/
-SNOWFLAKE_CLI_LOGIN_SHELL=~/.profile
-if [[ -e ~/.zprofile ]]; then
-  SNOWFLAKE_CLI_LOGIN_SHELL=~/.zprofile
-elif [[ -e ~/.zshrc ]]; then
-  SNOWFLAKE_CLI_LOGIN_SHELL=~/.zshrc
-elif [[ -e ~/.profile ]]; then
-  SNOWFLAKE_CLI_LOGIN_SHELL=~/.profile
-elif [[ -e ~/.bash_profile ]]; then
-  SNOWFLAKE_CLI_LOGIN_SHELL=~/.bash_profile
-elif [[ -e ~/.bashrc ]]; then
-  SNOWFLAKE_CLI_LOGIN_SHELL=~/.bashrc
-fi
+# List of potential login shell RC files
+
+# Check if the path is already in the PATH variable
+if [[ ":$PATH:" == *":$SNOWFLAKE_CLI_DEST:"* ]]; then
+  echo "[INFO] Path $SNOWFLAKE_CLI_DEST is already in PATH. No changes needed."
+else
+  for rc_file in "${RC_FILES[@]}"; do
+    # Expand tilde (~) to the user's home directory
+    rc_file_expanded=$(eval echo "$rc_file")

-if ! grep -q -E "^$SNOWFLAKE_CLI_COMMENT" $SNOWFLAKE_CLI_LOGIN_SHELL; then
-  add_dest_path_to_profile $SNOWFLAKE_CLI_DEST $SNOWFLAKE_CLI_LOGIN_SHELL
+    if [[ -e "$rc_file_expanded" ]]; then
+      # Add the PATH update to the file
+      add_dest_path_to_profile "$SNOWFLAKE_CLI_DEST" "$rc_file_expanded"
+    else
+      echo "[INFO] $rc_file_expanded does not exist, skipping..."
+    fi
+  done
 fi
diff --git a/scripts/packaging/setup_darwin.sh b/scripts/packaging/setup_darwin.sh
deleted file mode 100644
index 68b66f875d..0000000000
--- a/scripts/packaging/setup_darwin.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env bash
-set -o pipefail
-
-echo "Setting up the Snowflake CLI build environment"
-MACHINE=$(uname -m)
-
-ensure_pyenv_installation() {
-  if ! command -v pyenv &>/dev/null; then
-    echo "pyenv not found, installing..."
-    arch -${MACHINE} brew install pyenv
-  else
-    echo "pyenv already installed"
-  fi
-}
-
-activate_pyenv() {
-  export PYENV_ROOT="$HOME/.pyenv"
-  [[ -d $PYENV_ROOT/bin ]] && export PATH="$PYENV_ROOT/bin:$PATH"
-  eval "$(pyenv init - bash)"
-}
-
-ensure_hatch_installation() {
-  if ! command -v hatch &>/dev/null; then
-    echo "hatch not found, installing..."
-    arch -${MACHINE} brew install hatch
-  else
-    echo "hatch already installed"
-    arch -${MACHINE} brew upgrade hatch
-  fi
-}
-
-ensure_python_installation() {
-  pyenv versions
-  pyenv install -s 3.10
-  pyenv install -s 3.11
-  pyenv global 3.11
-  python --version
-  pip install -U pip uv hatch awscli
-}
diff --git a/snyk/requirements.txt b/snyk/requirements.txt
index 7cd1828e5f..aaaeb3ffaa 100644
--- a/snyk/requirements.txt
+++ b/snyk/requirements.txt
@@ -1,25 +1,26 @@
+click==8.1.8
 GitPython==3.1.44
 jinja2==3.1.6
 packaging
 pip
 pluggy==1.5.0
-pydantic==2.11.2
+pydantic==2.11.3
 PyYAML==6.0.2
 requests==2.32.3
 requirements-parser==0.11.0
 rich==14.0.0
 setuptools==78.1.0
-snowflake-connector-python[secure-local-storage]==3.14.0
+snowflake-connector-python[secure-local-storage]==3.15.0
 snowflake-snowpark-python>=1.15.0,<1.26.0;python_version < "3.12"
-snowflake.core==1.2.0; python_version < "3.12"
+snowflake.core==1.3.0; python_version < "3.12"
 tomlkit==0.13.2
 typer==0.15.2
-urllib3>=1.24.3,<2.4
+urllib3>=1.24.3,<2.5
 coverage==7.8.0
 pre-commit>=3.5.0
 pytest==8.3.5
 pytest-randomly==3.16.0
-pytest-httpserver==1.1.2
+pytest-httpserver==1.1.3
 syrupy==4.9.1
 factory-boy==3.3.3
 Faker==37.1.0
diff --git a/src/snowflake/cli/__about__.py b/src/snowflake/cli/__about__.py
index 19520dac3c..dd44e4c5b4 100644
--- a/src/snowflake/cli/__about__.py
+++ b/src/snowflake/cli/__about__.py
@@ -16,7 +16,7 @@

 from enum import Enum, unique

-VERSION = "3.7.0.dev0"
+VERSION = "3.8.0.dev+dbt0"


 @unique
diff --git a/src/snowflake/cli/_app/commands_registration/builtin_plugins.py b/src/snowflake/cli/_app/commands_registration/builtin_plugins.py
index 14012a48c4..f3c8a75e6d 100644
--- a/src/snowflake/cli/_app/commands_registration/builtin_plugins.py
+++ b/src/snowflake/cli/_app/commands_registration/builtin_plugins.py
@@ -15,6 +15,7 @@
 from snowflake.cli._plugins.auth.keypair import plugin_spec as auth_plugin_spec
 from snowflake.cli._plugins.connection import plugin_spec as connection_plugin_spec
 from snowflake.cli._plugins.cortex import plugin_spec as cortex_plugin_spec
+from snowflake.cli._plugins.dbt import plugin_spec as dbt_plugin_spec
 from snowflake.cli._plugins.git import plugin_spec as git_plugin_spec
 from snowflake.cli._plugins.helpers import plugin_spec as migrate_plugin_spec
 from snowflake.cli._plugins.init import plugin_spec as init_plugin_spec
@@ -52,6 +53,7 @@ def get_builtin_plugin_name_to_plugin_spec():
         "init": init_plugin_spec,
         "workspace": workspace_plugin_spec,
         "plugin": plugin_plugin_spec,
+        "dbt": dbt_plugin_spec,
         "logs": logs_plugin_spec,
     }
diff --git a/src/snowflake/cli/_plugins/dbt/__init__.py b/src/snowflake/cli/_plugins/dbt/__init__.py
new file mode 100644
index 0000000000..e612998b27
--- /dev/null
+++ b/src/snowflake/cli/_plugins/dbt/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2025 Snowflake Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/src/snowflake/cli/_plugins/dbt/commands.py b/src/snowflake/cli/_plugins/dbt/commands.py
new file mode 100644
index 0000000000..e8a03b97be
--- /dev/null
+++ b/src/snowflake/cli/_plugins/dbt/commands.py
@@ -0,0 +1,184 @@
+# Copyright (c) 2025 Snowflake Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+from typing import Optional
+
+import typer
+from click import ClickException, types
+from rich.progress import Progress, SpinnerColumn, TextColumn
+from snowflake.cli._plugins.dbt.constants import (
+    DBT_COMMANDS,
+    OUTPUT_COLUMN_NAME,
+    RESULT_COLUMN_NAME,
+)
+from snowflake.cli._plugins.dbt.manager import DBTManager
+from snowflake.cli._plugins.object.command_aliases import add_object_command_aliases
+from snowflake.cli._plugins.object.commands import scope_option
+from snowflake.cli.api.commands.decorators import global_options_with_connection
+from snowflake.cli.api.commands.flags import identifier_argument, like_option
+from snowflake.cli.api.commands.snow_typer import SnowTyperFactory
+from snowflake.cli.api.constants import ObjectType
+from snowflake.cli.api.feature_flags import FeatureFlag
+from snowflake.cli.api.identifiers import FQN
+from snowflake.cli.api.output.types import (
+    CommandResult,
+    MessageResult,
+    QueryResult,
+)
+from snowflake.cli.api.secure_path import SecurePath
+
+app = SnowTyperFactory(
+    name="dbt",
+    help="Manages dbt on Snowflake projects",
+    is_hidden=FeatureFlag.ENABLE_DBT.is_disabled,
+)
+log = logging.getLogger(__name__)
+
+
+DBTNameArgument = identifier_argument(sf_object="DBT Project", example="my_pipeline")
+
+# In passthrough commands the user may provide either the name of a dbt object or the
+# name of a dbt command, in which case FQN validation could fail.
+DBTNameOrCommandArgument = identifier_argument(
+    sf_object="DBT Project", example="my_pipeline", click_type=types.StringParamType()
+)
+
+add_object_command_aliases(
+    app=app,
+    object_type=ObjectType.DBT_PROJECT,
+    name_argument=DBTNameArgument,
+    like_option=like_option(
+        help_example='`list --like "my%"` lists all dbt projects that begin with “my”'
+    ),
+    scope_option=scope_option(help_example="`list --in database my_db`"),
+    ommit_commands=["drop", "create", "describe"],
+)
+
+
+@app.command(
+    "deploy",
+    requires_connection=True,
+)
+def deploy_dbt(
+    name: FQN = DBTNameArgument,
+    source: Optional[str] = typer.Option(
+        help="Path to directory containing dbt files to deploy. Defaults to current working directory.",
+        show_default=False,
+        default=None,
+    ),
+    profiles_dir: Optional[str] = typer.Option(
+        help="Path to directory containing profiles.yml. Defaults to directory provided in --source or current working directory",
+        show_default=False,
+        default=None,
+    ),
+    force: Optional[bool] = typer.Option(
+        False,
+        help="Overwrites conflicting files in the project, if any.",
+    ),
+    **options,
+) -> CommandResult:
+    """
+    Copy dbt files and create or update dbt on Snowflake project.
+    """
+    project_path = SecurePath(source) if source is not None else SecurePath.cwd()
+    profiles_dir_path = SecurePath(profiles_dir) if profiles_dir else project_path
+    return QueryResult(
+        DBTManager().deploy(
+            name,
+            project_path.resolve(),
+            profiles_dir_path.resolve(),
+            force=force,
+        )
+    )
+
+
+# `execute` is a pass-through command group: all params after the dbt command are
+# passed over as they are, suppressing the usual CLI behaviour for displaying help
+# or formatting options.
+dbt_execute_app = SnowTyperFactory(
+    name="execute",
+    help="Execute a dbt command on Snowflake",
+    subcommand_metavar="DBT_COMMAND",
+)
+app.add_typer(dbt_execute_app)
+
+
+@dbt_execute_app.callback()
+@global_options_with_connection
+def before_callback(
+    name: str = DBTNameOrCommandArgument,
+    run_async: Optional[bool] = typer.Option(
+        False, help="Run dbt command asynchronously and check it's result later."
+    ),
+    **options,
+):
+    """Handles global options passed before the command and takes pipeline name to be accessed through child context later"""
+    pass
+
+
+for cmd in DBT_COMMANDS:

+    @dbt_execute_app.command(
+        name=cmd,
+        requires_connection=False,
+        requires_global_options=False,
+        context_settings={"allow_extra_args": True, "ignore_unknown_options": True},
+        help=f"Execute {cmd} command on Snowflake.",
+        add_help_option=False,
+    )
+    def _dbt_execute(
+        ctx: typer.Context,
+    ) -> CommandResult:
+        dbt_cli_args = ctx.args
+        dbt_command = ctx.command.name
+        name = FQN.from_string(ctx.parent.params["name"])
+        run_async = ctx.parent.params["run_async"]
+        execute_args = (dbt_command, name, run_async, *dbt_cli_args)
+        dbt_manager = DBTManager()
+
+        if run_async is True:
+            result = dbt_manager.execute(*execute_args)
+            return MessageResult(
+                f"Command submitted. You can check the result with `snow sql -q \"select execution_status from table(information_schema.query_history_by_user()) where query_id in ('{result.sfqid}');\"`"
+            )
+
+        with Progress(
+            SpinnerColumn(),
+            TextColumn("[progress.description]{task.description}"),
+            transient=True,
+        ) as progress:
+            progress.add_task(description=f"Executing 'dbt {dbt_command}'", total=None)
+
+            result = dbt_manager.execute(*execute_args)
+
+        try:
+            columns = [column.name for column in result.description]
+            success_column_index = columns.index(RESULT_COLUMN_NAME)
+            stdout_column_index = columns.index(OUTPUT_COLUMN_NAME)
+        except ValueError:
+            raise ClickException("Malformed server response")
+        try:
+            is_success, output = [
+                (row[success_column_index], row[stdout_column_index])
+                for row in result
+            ][-1]
+        except IndexError:
+            raise ClickException("No data returned from server")
+
+        if is_success is True:
+            return MessageResult(output)
+        else:
+            raise ClickException(output)
diff --git a/src/snowflake/cli/_plugins/dbt/constants.py b/src/snowflake/cli/_plugins/dbt/constants.py
new file mode 100644
index 0000000000..b530a2f19e
--- /dev/null
+++ b/src/snowflake/cli/_plugins/dbt/constants.py
@@ -0,0 +1,40 @@
+# Copyright (c) 2025 Snowflake Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+RESULT_COLUMN_NAME = "SUCCESS"
+OUTPUT_COLUMN_NAME = "STDOUT"
+
+DBT_COMMANDS = [
+    "build",
+    "compile",
+    "deps",
+    "list",
+    "parse",
+    "run",
+    "run-operation",
+    "seed",
+    "show",
+    "snapshot",
+    "test",
+]
+
+UNSUPPORTED_COMMANDS = [
+    "clean",
+    "clone",
+    "debug",
+    "docs",
+    "init",
+    "retry",
+    "source",
+]
diff --git a/src/snowflake/cli/_plugins/dbt/manager.py b/src/snowflake/cli/_plugins/dbt/manager.py
new file mode 100644
index 0000000000..cf2909ae43
--- /dev/null
+++ b/src/snowflake/cli/_plugins/dbt/manager.py
@@ -0,0 +1,158 @@
+# Copyright (c) 2025 Snowflake Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from collections import defaultdict
+
+import yaml
+from snowflake.cli._plugins.object.manager import ObjectManager
+from snowflake.cli._plugins.stage.manager import StageManager
+from snowflake.cli.api.console import cli_console
+from snowflake.cli.api.constants import DEFAULT_SIZE_LIMIT_MB, ObjectType
+from snowflake.cli.api.exceptions import CliError
+from snowflake.cli.api.identifiers import FQN
+from snowflake.cli.api.secure_path import SecurePath
+from snowflake.cli.api.sql_execution import SqlExecutionMixin
+from snowflake.connector.cursor import SnowflakeCursor
+
+
+class DBTManager(SqlExecutionMixin):
+    def list(self) -> SnowflakeCursor:  # noqa: A003
+        query = "SHOW DBT PROJECTS"
+        return self.execute_query(query)
+
+    @staticmethod
+    def exists(name: FQN) -> bool:
+        return ObjectManager().object_exists(
+            object_type=ObjectType.DBT_PROJECT.value.cli_name, fqn=name
+        )
+
+    def deploy(
+        self,
+        name: FQN,
+        path: SecurePath,
+        profiles_path: SecurePath,
+        force: bool,
+    ) -> SnowflakeCursor:
+        dbt_project_path = path / "dbt_project.yml"
+        if not dbt_project_path.exists():
+            raise CliError(
+                f"dbt_project.yml does not exist in directory {path.path.absolute()}."
+            )
+
+        with dbt_project_path.open(read_file_limit_mb=DEFAULT_SIZE_LIMIT_MB) as fd:
+            dbt_project = yaml.safe_load(fd)
+        try:
+            profile = dbt_project["profile"]
+        except KeyError:
+            raise CliError("`profile` is not defined in dbt_project.yml")
+
+        self._validate_profiles(profiles_path, profile)
+
+        with cli_console.phase("Creating temporary stage"):
+            stage_manager = StageManager()
+            stage_fqn = FQN.from_string(f"dbt_{name}_stage").using_context()
+            stage_name = stage_manager.get_standard_stage_prefix(stage_fqn)
+            stage_manager.create(stage_fqn, temporary=True)
+
+        with cli_console.phase("Copying project files to stage"):
+            result_count = len(list(stage_manager.put_recursive(path.path, stage_name)))
+            if profiles_path != path:
+                stage_manager.put(
+                    str((profiles_path.path / "profiles.yml").absolute()), stage_name
+                )
+                result_count += 1
+            cli_console.step(f"Copied {result_count} files")
+
+        with cli_console.phase("Creating DBT project"):
+            if force is True:
+                query = f"CREATE OR REPLACE DBT PROJECT {name}"
+            elif self.exists(name=name):
+                query = f"ALTER DBT PROJECT {name} ADD VERSION"
+            else:
+                query = f"CREATE DBT PROJECT {name}"
+            query += f"\nFROM {stage_name}"
+            return self.execute_query(query)
+
+    @staticmethod
+    def _validate_profiles(profiles_path: SecurePath, target_profile: str) -> None:
+        """
+        Validates that:
+         * profiles.yml exists
+         * contains the profile specified in dbt_project.yml
+         * no other profiles are defined there
+         * does not contain any confidential data like passwords
+        """
+        profiles_file = profiles_path / "profiles.yml"
+        if not profiles_file.exists():
+            raise CliError(
+                f"profiles.yml does not exist in directory {profiles_path.path.absolute()}."
+            )
+        with profiles_file.open(read_file_limit_mb=DEFAULT_SIZE_LIMIT_MB) as fd:
+            profiles = yaml.safe_load(fd)
+
+        if target_profile not in profiles:
+            raise CliError(f"profile {target_profile} is not defined in profiles.yml")
+
+        errors = defaultdict(list)
+        if len(profiles.keys()) > 1:
+            for profile_name in profiles.keys():
+                if profile_name.lower() != target_profile.lower():
+                    errors[profile_name].append("Remove unnecessary profiles")
+
+        required_fields = {
+            "account",
+            "database",
+            "role",
+            "schema",
+            "type",
+            "user",
+            "warehouse",
+        }
+        supported_fields = {
+            "threads",
+        }
+        for target_name, target in profiles[target_profile]["outputs"].items():
+            if missing_keys := required_fields - set(target.keys()):
+                errors[target_profile].append(
+                    f"Missing required fields: {', '.join(sorted(missing_keys))} in target {target_name}"
+                )
+            if (
+                unsupported_keys := set(target.keys())
+                - required_fields
+                - supported_fields
+            ):
+                errors[target_profile].append(
+                    f"Unsupported fields found: {', '.join(sorted(unsupported_keys))} in target {target_name}"
+                )
+            if "type" in target and target["type"].lower() != "snowflake":
+                errors[target_profile].append(
+                    f"Value for type field is invalid. Should be set to `snowflake` in target {target_name}"
+                )
+
+        if errors:
+            message = "Found the following errors in profiles.yml. Please fix them before proceeding:"
+            for target, issues in errors.items():
+                message += f"\n{target}"
+                message += "\n * " + "\n * ".join(issues)
+            raise CliError(message)
+
+    def execute(
+        self, dbt_command: str, name: str, run_async: bool, *dbt_cli_args
+    ) -> SnowflakeCursor:
+        if dbt_cli_args:
+            dbt_command = " ".join([dbt_command, *dbt_cli_args]).strip()
+        query = f"EXECUTE DBT PROJECT {name} args='{dbt_command}'"
+        return self.execute_query(query, _exec_async=run_async)
diff --git a/src/snowflake/cli/_plugins/dbt/plugin_spec.py b/src/snowflake/cli/_plugins/dbt/plugin_spec.py
new file mode 100644
index 0000000000..59c15975df
--- /dev/null
+++ b/src/snowflake/cli/_plugins/dbt/plugin_spec.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2025 Snowflake Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from snowflake.cli._plugins.dbt import commands
+from snowflake.cli.api.plugins.command import (
+    SNOWCLI_ROOT_COMMAND_PATH,
+    CommandSpec,
+    CommandType,
+    plugin_hook_impl,
+)
+
+
+@plugin_hook_impl
+def command_spec():
+    return CommandSpec(
+        parent_command_path=SNOWCLI_ROOT_COMMAND_PATH,
+        command_type=CommandType.COMMAND_GROUP,
+        typer_instance=commands.app.create_instance(),
+    )
diff --git a/src/snowflake/cli/_plugins/logs/commands.py b/src/snowflake/cli/_plugins/logs/commands.py
index 67c7a5d820..eccfafefb3 100644
--- a/src/snowflake/cli/_plugins/logs/commands.py
+++ b/src/snowflake/cli/_plugins/logs/commands.py
@@ -4,9 +4,11 @@

 import typer
 from click import ClickException
-from snowflake.cli._plugins.logs.manager import LogsManager, LogsQueryRow
+from snowflake.cli._plugins.logs.manager import LogsManager
+from snowflake.cli._plugins.logs.utils import LOG_LEVELS, LogsQueryRow
 from snowflake.cli._plugins.object.commands import NameArgument, ObjectArgument
 from snowflake.cli.api.commands.snow_typer import SnowTyperFactory
+from snowflake.cli.api.exceptions import CliArgumentError
 from snowflake.cli.api.identifiers import FQN
 from snowflake.cli.api.output.types import (
     CommandResult,
@@ -41,11 +43,22 @@ def get_logs(
         "--table",
         help="The table to query for logs. If not provided, the default table will be used",
     ),
+    log_level: Optional[str] = typer.Option(
+        "INFO",
+        "--log-level",
+        help="The log level to filter by. If not provided, INFO will be used",
+    ),
     **options,
 ):
     """
     Retrieves logs for a given object.
     """
+
+    if log_level and not log_level.upper() in LOG_LEVELS:
+        raise CliArgumentError(
+            f"Invalid log level. Please choose from {', '.join(LOG_LEVELS)}"
+        )
+
     if refresh_time and to:
         raise ClickException(
             "You cannot set both --refresh and --to parameters. 
Please check the values" @@ -61,6 +74,7 @@ def get_logs( from_time=from_time, refresh_time=refresh_time, event_table=event_table, + log_level=log_level, ) logs = itertools.chain( (MessageResult(log.log_message) for logs in logs_stream for log in logs) @@ -72,6 +86,7 @@ def get_logs( from_time=from_time, to_time=to_time, event_table=event_table, + log_level=log_level, ) logs = (MessageResult(log.log_message) for log in logs_iterable) # type: ignore diff --git a/src/snowflake/cli/_plugins/logs/manager.py b/src/snowflake/cli/_plugins/logs/manager.py index 5a2db4822d..d999dfb3c2 100644 --- a/src/snowflake/cli/_plugins/logs/manager.py +++ b/src/snowflake/cli/_plugins/logs/manager.py @@ -1,26 +1,19 @@ import time from datetime import datetime from textwrap import dedent -from typing import Iterable, List, NamedTuple, Optional, Tuple +from typing import Iterable, List, Optional -from click import ClickException +from snowflake.cli._plugins.logs.utils import ( + LogsQueryRow, + get_timestamp_query, + parse_log_levels_for_query, + sanitize_logs, +) from snowflake.cli._plugins.object.commands import NameArgument, ObjectArgument from snowflake.cli.api.identifiers import FQN from snowflake.cli.api.sql_execution import SqlExecutionMixin from snowflake.connector.cursor import SnowflakeCursor -LogsQueryRow = NamedTuple( - "LogsQueryRow", - [ - ("timestamp", datetime), - ("database_name", str), - ("schema_name", str), - ("object_name", str), - ("log_level", str), - ("log_message", str), - ], -) - class LogsManager(SqlExecutionMixin): def stream_logs( @@ -30,6 +23,7 @@ def stream_logs( object_name: FQN = NameArgument, from_time: Optional[datetime] = None, event_table: Optional[str] = None, + log_level: Optional[str] = "INFO", ) -> Iterable[List[LogsQueryRow]]: try: previous_end = from_time @@ -41,6 +35,7 @@ def stream_logs( from_time=previous_end, to_time=None, event_table=event_table, + log_level=log_level, ).fetchall() if raw_logs: @@ -60,6 +55,7 @@ def get_logs( from_time: Optional[datetime] = None, to_time: Optional[datetime] = None, event_table: Optional[str] = None, + log_level: Optional[str] = "INFO", ) -> Iterable[LogsQueryRow]: """ Basic function to get a single batch of logs from the server @@ -71,9 +67,10 @@ def get_logs( from_time=from_time, to_time=to_time, event_table=event_table, + log_level=log_level, ) - return self.sanitize_logs(logs) + return sanitize_logs(logs) def get_raw_logs( self, @@ -82,6 +79,7 @@ def get_raw_logs( from_time: Optional[datetime] = None, to_time: Optional[datetime] = None, event_table: Optional[str] = None, + log_level: Optional[str] = "INFO", ) -> SnowflakeCursor: table = event_table if event_table else "SNOWFLAKE.TELEMETRY.EVENTS" @@ -97,9 +95,9 @@ def get_raw_logs( value::string as log_message FROM {table} WHERE record_type = 'LOG' - AND (record:severity_text = 'INFO' or record:severity_text is NULL ) + AND (record:severity_text IN ({parse_log_levels_for_query((log_level))}) or record:severity_text is NULL ) AND object_name = '{object_name}' - {self._get_timestamp_query(from_time, to_time)} + {get_timestamp_query(from_time, to_time)} ORDER BY timestamp; """ ).strip() @@ -107,32 +105,3 @@ def get_raw_logs( result = self.execute_query(query) return result - - def _get_timestamp_query( - self, from_time: Optional[datetime], to_time: Optional[datetime] - ): - if from_time and to_time and from_time > to_time: - raise ClickException( - "From_time cannot be later than to_time. 
Please check the values" - ) - query = [] - - if from_time is not None: - query.append( - f"AND timestamp >= TO_TIMESTAMP_LTZ('{from_time.isoformat()}')\n" - ) - - if to_time is not None: - query.append( - f"AND timestamp <= TO_TIMESTAMP_LTZ('{to_time.isoformat()}')\n" - ) - - return "".join(query) - - def sanitize_logs(self, logs: SnowflakeCursor | List[Tuple]) -> List[LogsQueryRow]: - try: - return [LogsQueryRow(*log) for log in logs] - except TypeError: - raise ClickException( - "Logs table has incorrect format. Please check the logs_table in your database" - ) diff --git a/src/snowflake/cli/_plugins/logs/utils.py b/src/snowflake/cli/_plugins/logs/utils.py new file mode 100644 index 0000000000..dbd45da433 --- /dev/null +++ b/src/snowflake/cli/_plugins/logs/utils.py @@ -0,0 +1,60 @@ +from datetime import datetime +from typing import List, NamedTuple, Optional, Tuple + +from snowflake.cli.api.exceptions import CliArgumentError, CliSqlError +from snowflake.connector.cursor import SnowflakeCursor + +LOG_LEVELS = ["TRACE", "DEBUG", "INFO", "WARN", "ERROR", "FATAL"] + +LogsQueryRow = NamedTuple( + "LogsQueryRow", + [ + ("timestamp", datetime), + ("database_name", str), + ("schema_name", str), + ("object_name", str), + ("log_level", str), + ("log_message", str), + ], +) + + +def sanitize_logs(logs: SnowflakeCursor | List[Tuple]) -> List[LogsQueryRow]: + try: + return [LogsQueryRow(*log) for log in logs] + except TypeError: + raise CliSqlError( + "Logs table has incorrect format. Please check the logs_table in your database" + ) + + +def get_timestamp_query(from_time: Optional[datetime], to_time: Optional[datetime]): + if from_time and to_time and from_time > to_time: + raise CliArgumentError( + "From_time cannot be later than to_time. Please check the values" + ) + query = [] + + if from_time is not None: + query.append(f"AND timestamp >= TO_TIMESTAMP_LTZ('{from_time.isoformat()}')\n") + + if to_time is not None: + query.append(f"AND timestamp <= TO_TIMESTAMP_LTZ('{to_time.isoformat()}')\n") + + return "".join(query) + + +def get_log_levels(log_level: str): + if log_level.upper() not in LOG_LEVELS and log_level != "": + raise CliArgumentError( + f"Invalid log level. 
Please choose from {', '.join(LOG_LEVELS)}" + ) + + if log_level == "": + log_level = "INFO" + + return LOG_LEVELS[LOG_LEVELS.index(log_level.upper()) :] + + +def parse_log_levels_for_query(log_level: str): + return ", ".join(f"'{level}'" for level in get_log_levels(log_level)) diff --git a/src/snowflake/cli/_plugins/snowpark/snowpark_entity.py b/src/snowflake/cli/_plugins/snowpark/snowpark_entity.py index 7ff04062cb..e1f9c25da1 100644 --- a/src/snowflake/cli/_plugins/snowpark/snowpark_entity.py +++ b/src/snowflake/cli/_plugins/snowpark/snowpark_entity.py @@ -231,7 +231,7 @@ def _process_requirements( # TODO: maybe leave all the logic with requirements ) zip_dir( - source=tmp_dir, + source=tmp_dir.path, dest_zip=bundle_dir / archive_name, ) diff --git a/src/snowflake/cli/_plugins/spcs/image_registry/manager.py b/src/snowflake/cli/_plugins/spcs/image_registry/manager.py index fe051beec7..4d11287e47 100644 --- a/src/snowflake/cli/_plugins/spcs/image_registry/manager.py +++ b/src/snowflake/cli/_plugins/spcs/image_registry/manager.py @@ -74,12 +74,21 @@ def _has_url_scheme(self, url: str): return re.fullmatch(r"^.*//.+", url) is not None def get_registry_url(self) -> str: - repositories_query = "show image repositories in account" - result_set = self.execute_query(repositories_query, cursor_class=DictCursor) - results = result_set.fetchall() - if len(results) == 0: - raise NoImageRepositoriesFoundError() - sample_repository_url = results[0]["repository_url"] + images_query = "show image repositories in schema snowflake.images;" + images_result = self.execute_query(images_query, cursor_class=DictCursor) + + results = images_result.fetchone() + + if not results: + # fallback to account level query - slower one, so we try to avoid it if possible + repositories_query = "show image repositories in account" + result_set = self.execute_query(repositories_query, cursor_class=DictCursor) + results = result_set.fetchone() + + if not results: + raise NoImageRepositoriesFoundError() + + sample_repository_url = results["repository_url"] if not self._has_url_scheme(sample_repository_url): sample_repository_url = f"//{sample_repository_url}" return urlparse(sample_repository_url).netloc diff --git a/src/snowflake/cli/api/commands/snow_typer.py b/src/snowflake/cli/api/commands/snow_typer.py index 786bb6e776..2bc4bdd807 100644 --- a/src/snowflake/cli/api/commands/snow_typer.py +++ b/src/snowflake/cli/api/commands/snow_typer.py @@ -228,6 +228,7 @@ def __init__( short_help: Optional[str] = None, is_hidden: Optional[Callable[[], bool]] = None, deprecated: bool = False, + subcommand_metavar: Optional[str] = None, ): self.name = name self.help = help @@ -237,6 +238,7 @@ def __init__( self.commands_to_register: List[SnowTyperCommandData] = [] self.subapps_to_register: List[SnowTyperFactory] = [] self.callbacks_to_register: List[Callable] = [] + self.subcommand_metavar = subcommand_metavar def create_instance(self) -> SnowTyper: app = SnowTyper( @@ -245,6 +247,7 @@ def create_instance(self) -> SnowTyper: short_help=self.short_help, hidden=self.is_hidden() if self.is_hidden else False, deprecated=self.deprecated, + subcommand_metavar=self.subcommand_metavar, ) # register commands for command in self.commands_to_register: diff --git a/src/snowflake/cli/api/constants.py b/src/snowflake/cli/api/constants.py index 0c0674a9ad..b15112a7cf 100644 --- a/src/snowflake/cli/api/constants.py +++ b/src/snowflake/cli/api/constants.py @@ -35,6 +35,7 @@ def __str__(self): class ObjectType(Enum): COMPUTE_POOL = 
ObjectNames("compute-pool", "compute pool", "compute pools") + DBT_PROJECT = ObjectNames("dbt-project", "dbt project", "dbt projects") DATABASE = ObjectNames("database", "database", "databases") FUNCTION = ObjectNames("function", "function", "functions") INTEGRATION = ObjectNames("integration", "integration", "integrations") @@ -79,6 +80,7 @@ def __str__(self): ObjectType.APPLICATION.value.cli_name, ObjectType.APPLICATION_PACKAGE.value.cli_name, ObjectType.PROJECT.value.cli_name, + ObjectType.DBT_PROJECT.value.cli_name, } SUPPORTED_OBJECTS = sorted(OBJECT_TO_NAMES.keys() - UNSUPPORTED_OBJECTS) diff --git a/src/snowflake/cli/api/feature_flags.py b/src/snowflake/cli/api/feature_flags.py index df63155ceb..75080444ec 100644 --- a/src/snowflake/cli/api/feature_flags.py +++ b/src/snowflake/cli/api/feature_flags.py @@ -68,4 +68,5 @@ class FeatureFlag(FeatureFlagMixin): ) ENABLE_SNOWPARK_GLOB_SUPPORT = BooleanFlag("ENABLE_SNOWPARK_GLOB_SUPPORT", False) ENABLE_SPCS_SERVICE_EVENTS = BooleanFlag("ENABLE_SPCS_SERVICE_EVENTS", False) + ENABLE_DBT = BooleanFlag("ENABLE_DBT", False) ENABLE_AUTH_KEYPAIR = BooleanFlag("ENABLE_AUTH_KEYPAIR", False) diff --git a/src/snowflake/cli/api/secure_path.py b/src/snowflake/cli/api/secure_path.py index f920fe194c..0ffeada1bf 100644 --- a/src/snowflake/cli/api/secure_path.py +++ b/src/snowflake/cli/api/secure_path.py @@ -46,6 +46,11 @@ def __repr__(self): def __truediv__(self, key): return SecurePath(self._path / key) + def __eq__(self, other): + if isinstance(other, Path): + return self.path == other + return self.path == other.path + @property def path(self) -> Path: """ @@ -72,6 +77,10 @@ def absolute(self): """ return SecurePath(self._path.absolute()) + @classmethod + def cwd(cls) -> SecurePath: + return cls(Path.cwd()) + def resolve(self): """ Make the path absolute, resolving symlinks diff --git a/src/snowflake/cli/api/sql_execution.py b/src/snowflake/cli/api/sql_execution.py index d5038a9312..c533e3f176 100644 --- a/src/snowflake/cli/api/sql_execution.py +++ b/src/snowflake/cli/api/sql_execution.py @@ -87,20 +87,18 @@ def _execute_string( def execute_string(self, query: str, **kwargs) -> Iterable[SnowflakeCursor]: """Executes a single SQL query and returns the results""" - return self._execute_string(query, **kwargs) + return self._execute_string(dedent(query), **kwargs) def execute_query(self, query: str, **kwargs) -> SnowflakeCursor: """Executes a single SQL query and returns the last result""" - *_, last_result = list(self.execute_string(dedent(query), **kwargs)) + *_, last_result = list(self.execute_string(query, **kwargs)) return last_result def execute_queries(self, queries: str, **kwargs): """Executes multiple SQL queries (passed as one string) and returns the results as a list""" # Without remove_comments=True, connectors might throw an error if there is a comment at the end of the file - return list( - self.execute_string(dedent(queries), remove_comments=True, **kwargs) - ) + return list(self.execute_string(queries, remove_comments=True, **kwargs)) class SqlExecutor(BaseSqlExecutor): diff --git a/tests/__snapshots__/test_help_messages.ambr b/tests/__snapshots__/test_help_messages.ambr index 57f0e6eff5..5406935407 100644 --- a/tests/__snapshots__/test_help_messages.ambr +++ b/tests/__snapshots__/test_help_messages.ambr @@ -3964,6 +3964,1619 @@ +------------------------------------------------------------------------------+ + ''' +# --- +# name: test_help_messages[dbt.deploy] + ''' + + Usage: root dbt deploy [OPTIONS] NAME + + Copy dbt files and create or 
update dbt on Snowflake project. + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --source TEXT Path to directory containing dbt | + | files to deploy. Defaults to current | + | working directory. | + | --profiles-dir TEXT Path to directory containing | + | profiles.yml. Defaults to directory | + | provided in --source or current | + | working directory | + | --force --no-force Overwrites conflicting files in the | + | project, if any. | + | [default: no-force] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. 
| + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.build] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. 
| + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. | + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.compile] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. 
| + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. | + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.deps] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. 
| + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. | + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.list] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. 
| + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. 
| + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.parse] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. 
| + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. | + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.run-operation] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. 
| + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. | + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.run] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. 
| + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. | + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.seed] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. 
| + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. 
| + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.show] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. 
| + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. | + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.snapshot] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. 
| + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. | + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute.test] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. 
| + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. | + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.execute] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. 
| + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. 
| + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt.list] + ''' + + Usage: root dbt list [OPTIONS] + + Lists all available dbt projects. + + +- Options --------------------------------------------------------------------+ + | --like -l TEXT SQL LIKE pattern for filtering objects by | + | name. For example, list --like "my%" lists | + | all dbt projects that begin with “my”. | + | [default: %%] | + | --in ... | + | | + | Specifies the scope of this command using | + | '--in ', for example list --in database | + | my_db. | + | [default: None, None] | + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. 
| + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages[dbt] + ''' + + Usage: root dbt [OPTIONS] COMMAND [ARGS]... + + Manages dbt on Snowflake projects + + +- Options --------------------------------------------------------------------+ + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | deploy Copy dbt files and create or update dbt on Snowflake project. | + | execute Execute a dbt command on Snowflake | + | list Lists all available dbt projects. | + +------------------------------------------------------------------------------+ + + ''' # --- # name: test_help_messages[git.copy] @@ -5118,19 +6731,22 @@ | [required] | +------------------------------------------------------------------------------+ +- Options --------------------------------------------------------------------+ - | --from TEXT The start time of the logs to retrieve. Accepts | - | all ISO8061 formats | - | [default: None] | - | --to TEXT The end time of the logs to retrieve. Accepts | - | all ISO8061 formats | - | [default: None] | - | --refresh INTEGER If set, the logs will be streamed with the given | - | refresh time in seconds | - | [default: None] | - | --table TEXT The table to query for logs. If not provided, | - | the default table will be used | - | [default: None] | - | --help -h Show this message and exit. | + | --from TEXT The start time of the logs to retrieve. | + | Accepts all ISO8061 formats | + | [default: None] | + | --to TEXT The end time of the logs to retrieve. Accepts | + | all ISO8061 formats | + | [default: None] | + | --refresh INTEGER If set, the logs will be streamed with the | + | given refresh time in seconds | + | [default: None] | + | --table TEXT The table to query for logs. If not provided, | + | the default table will be used | + | [default: None] | + | --log-level TEXT The log level to filter by. If not provided, | + | INFO will be used | + | [default: INFO] | + | --help -h Show this message and exit. | +------------------------------------------------------------------------------+ +- Connection configuration ---------------------------------------------------+ | --connection,--environment -c TEXT Name of the connection, as | @@ -14391,6 +16007,140 @@ +------------------------------------------------------------------------------+ + ''' +# --- +# name: test_help_messages_no_help_flag[dbt.execute] + ''' + + Usage: root dbt execute [OPTIONS] NAME DBT_COMMAND + + Execute a dbt command on Snowflake + + +- Arguments ------------------------------------------------------------------+ + | * name TEXT Identifier of the DBT Project; for example: my_pipeline | + | [required] | + +------------------------------------------------------------------------------+ + +- Options --------------------------------------------------------------------+ + | --run-async --no-run-async Run dbt command asynchronously and | + | check it's result later. | + | [default: no-run-async] | + | --help -h Show this message and exit. 
| + +------------------------------------------------------------------------------+ + +- Connection configuration ---------------------------------------------------+ + | --connection,--environment -c TEXT Name of the connection, as | + | defined in your config.toml | + | file. Default: default. | + | --host TEXT Host address for the | + | connection. Overrides the | + | value specified for the | + | connection. | + | --port INTEGER Port for the connection. | + | Overrides the value | + | specified for the | + | connection. | + | --account,--accountname TEXT Name assigned to your | + | Snowflake account. Overrides | + | the value specified for the | + | connection. | + | --user,--username TEXT Username to connect to | + | Snowflake. Overrides the | + | value specified for the | + | connection. | + | --password TEXT Snowflake password. | + | Overrides the value | + | specified for the | + | connection. | + | --authenticator TEXT Snowflake authenticator. | + | Overrides the value | + | specified for the | + | connection. | + | --private-key-file,--private… TEXT Snowflake private key file | + | path. Overrides the value | + | specified for the | + | connection. | + | --token-file-path TEXT Path to file with an OAuth | + | token to use when connecting | + | to Snowflake. | + | --database,--dbname TEXT Database to use. Overrides | + | the value specified for the | + | connection. | + | --schema,--schemaname TEXT Database schema to use. | + | Overrides the value | + | specified for the | + | connection. | + | --role,--rolename TEXT Role to use. Overrides the | + | value specified for the | + | connection. | + | --warehouse TEXT Warehouse to use. Overrides | + | the value specified for the | + | connection. | + | --temporary-connection -x Uses a connection defined | + | with command line | + | parameters, instead of one | + | defined in config | + | --mfa-passcode TEXT Token to use for | + | multi-factor authentication | + | (MFA) | + | --enable-diag Whether to generate a | + | connection diagnostic | + | report. | + | --diag-log-path TEXT Path for the generated | + | report. Defaults to system | + | temporary directory. | + | --diag-allowlist-path TEXT Path to a JSON file that | + | contains allowlist | + | parameters. | + +------------------------------------------------------------------------------+ + +- Global configuration -------------------------------------------------------+ + | --format [TABLE|JSON] Specifies the output format. | + | [default: TABLE] | + | --verbose -v Displays log entries for log | + | levels info and higher. | + | --debug Displays log entries for log | + | levels debug and higher; debug | + | logs contain additional | + | information. | + | --silent Turns off intermediate output | + | to console. | + | --enhanced-exit-codes Differentiate exit error codes | + | based on failure type. | + | [env var: | + | SNOWFLAKE_ENHANCED_EXIT_CODES] | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | build Execute build command on Snowflake. | + | compile Execute compile command on Snowflake. | + | deps Execute deps command on Snowflake. | + | list Execute list command on Snowflake. | + | parse Execute parse command on Snowflake. | + | run Execute run command on Snowflake. | + | run-operation Execute run-operation command on Snowflake. | + | seed Execute seed command on Snowflake. | + | show Execute show command on Snowflake. 
| + | snapshot Execute snapshot command on Snowflake. | + | test Execute test command on Snowflake. | + +------------------------------------------------------------------------------+ + + + ''' +# --- +# name: test_help_messages_no_help_flag[dbt] + ''' + + Usage: root dbt [OPTIONS] COMMAND [ARGS]... + + Manages dbt on Snowflake projects + + +- Options --------------------------------------------------------------------+ + | --help -h Show this message and exit. | + +------------------------------------------------------------------------------+ + +- Commands -------------------------------------------------------------------+ + | deploy Copy dbt files and create or update dbt on Snowflake project. | + | execute Execute a dbt command on Snowflake | + | list Lists all available dbt projects. | + +------------------------------------------------------------------------------+ + + ''' # --- # name: test_help_messages_no_help_flag[git] diff --git a/tests/conftest.py b/tests/conftest.py index 7ac9811d19..655fc686f2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -24,7 +24,7 @@ from io import StringIO from logging import FileHandler from pathlib import Path -from typing import Generator, List, NamedTuple, Optional, Union +from typing import Any, Dict, Generator, List, NamedTuple, Optional, Union from unittest import mock import pytest @@ -276,6 +276,7 @@ def __init__( self._checkout_count = 0 self._role = role self._warehouse = warehouse + self.kwargs: List[Dict[str, Any]] = [] def get_query(self): return "\n".join(self.queries) @@ -315,6 +316,7 @@ def execute_string(self, query: str, **kwargs): if self._checkout_count > 1: raise ProgrammingError("Checkout already exists") self.queries.append(query) + self.kwargs.append(kwargs) return (self.cs,) def execute_stream(self, query: StringIO, **kwargs): diff --git a/tests/dbt/__init__.py b/tests/dbt/__init__.py new file mode 100644 index 0000000000..e612998b27 --- /dev/null +++ b/tests/dbt/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2025 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/dbt/test_dbt_commands.py b/tests/dbt/test_dbt_commands.py new file mode 100644 index 0000000000..3a6f5492d6 --- /dev/null +++ b/tests/dbt/test_dbt_commands.py @@ -0,0 +1,446 @@ +# Copyright (c) 2025 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from pathlib import Path +from unittest import mock + +import pytest +import yaml +from snowflake.cli._plugins.dbt.constants import OUTPUT_COLUMN_NAME, RESULT_COLUMN_NAME +from snowflake.cli.api.identifiers import FQN + + +class TestDBTList: + def test_list_command_alias(self, mock_connect, runner): + result = runner.invoke( + [ + "object", + "list", + "dbt-project", + "--like", + "%PROJECT_NAME%", + "--in", + "database", + "my_db", + ] + ) + + assert result.exit_code == 0, result.output + result = runner.invoke( + ["dbt", "list", "--like", "%PROJECT_NAME%", "--in", "database", "my_db"], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.output + + queries = mock_connect.mocked_ctx.get_queries() + assert len(queries) == 2 + assert ( + queries[0] + == queries[1] + == "show dbt projects like '%PROJECT_NAME%' in database my_db" + ) + + +class TestDBTDeploy: + @pytest.fixture + def dbt_project_path(self, tmp_path_factory): + source_path = tmp_path_factory.mktemp("dbt_project") + dbt_project_file = source_path / "dbt_project.yml" + dbt_project_file.write_text(yaml.dump({"profile": "dev"})) + dbt_profiles_file = source_path / "profiles.yml" + dbt_profiles_file.write_text( + yaml.dump( + { + "dev": { + "outputs": { + "local": { + "account": "test_account", + "database": "testdb", + "role": "test_role", + "schema": "test_schema", + "threads": 2, + "type": "snowflake", + "user": "test_user", + "warehouse": "test_warehouse", + } + } + } + }, + ) + ) + yield source_path + + @pytest.fixture + def mock_cli_console(self): + with mock.patch("snowflake.cli.api.console") as _fixture: + yield _fixture + + @pytest.fixture + def mock_exists(self): + with mock.patch( + "snowflake.cli._plugins.dbt.manager.DBTManager.exists", return_value=False + ) as _fixture: + yield _fixture + + @mock.patch("snowflake.cli._plugins.dbt.manager.StageManager.put_recursive") + @mock.patch("snowflake.cli._plugins.dbt.manager.StageManager.create") + def test_deploys_project_from_source( + self, + mock_create, + mock_put_recursive, + mock_connect, + runner, + dbt_project_path, + mock_exists, + ): + + result = runner.invoke( + [ + "dbt", + "deploy", + "TEST_PIPELINE", + f"--source={dbt_project_path}", + ] + ) + + assert result.exit_code == 0, result.output + assert ( + mock_connect.mocked_ctx.get_query() + == """CREATE DBT PROJECT TEST_PIPELINE +FROM @MockDatabase.MockSchema.dbt_TEST_PIPELINE_stage""" + ) + stage_fqn = FQN.from_string(f"dbt_TEST_PIPELINE_stage").using_context() + mock_create.assert_called_once_with(stage_fqn, temporary=True) + mock_put_recursive.assert_called_once_with( + dbt_project_path, "@MockDatabase.MockSchema.dbt_TEST_PIPELINE_stage" + ) + + @mock.patch("snowflake.cli._plugins.dbt.manager.StageManager.put_recursive") + @mock.patch("snowflake.cli._plugins.dbt.manager.StageManager.create") + def test_force_flag_uses_create_or_replace( + self, + _mock_create, + _mock_put_recursive, + mock_connect, + runner, + dbt_project_path, + ): + + result = runner.invoke( + [ + "dbt", + "deploy", + "TEST_PIPELINE", + f"--source={dbt_project_path}", + "--force", + ] + ) + + assert result.exit_code == 0, result.output + assert mock_connect.mocked_ctx.get_query().startswith( + "CREATE OR REPLACE DBT PROJECT" + ) + + @mock.patch("snowflake.cli._plugins.dbt.manager.StageManager.put_recursive") + @mock.patch("snowflake.cli._plugins.dbt.manager.StageManager.create") + def test_alters_existing_object( + self, + _mock_create, + _mock_put_recursive, + mock_connect, + runner, + 
dbt_project_path, + mock_exists, + ): + mock_exists.return_value = True + + result = runner.invoke( + [ + "dbt", + "deploy", + "TEST_PIPELINE", + f"--source={dbt_project_path}", + ] + ) + + assert result.exit_code == 0, result.output + assert mock_connect.mocked_ctx.get_query().startswith( + """ALTER DBT PROJECT TEST_PIPELINE ADD VERSION +FROM @MockDatabase.MockSchema.dbt_TEST_PIPELINE_stage""" + ) + + @mock.patch("snowflake.cli._plugins.dbt.manager.StageManager.put_recursive") + @mock.patch("snowflake.cli._plugins.dbt.manager.StageManager.put") + @mock.patch("snowflake.cli._plugins.dbt.manager.StageManager.create") + def test_dbt_deploy_with_custom_profiles_dir( + self, + _mock_create, + mock_put, + _mock_put_recursive, + mock_connect, + runner, + dbt_project_path, + mock_exists, + ): + new_profiles_directory = Path(dbt_project_path) / "dbt_profiles" + new_profiles_directory.mkdir(parents=True, exist_ok=True) + profiles_file = dbt_project_path / "profiles.yml" + profiles_file.rename(new_profiles_directory / "profiles.yml") + + result = runner.invoke( + [ + "dbt", + "deploy", + "TEST_PIPELINE", + f"--source={dbt_project_path}", + f"--profiles-dir={new_profiles_directory}", + ] + ) + + assert result.exit_code == 0, result.output + mock_put.assert_called_once_with( + str(new_profiles_directory / "profiles.yml"), + "@MockDatabase.MockSchema.dbt_TEST_PIPELINE_stage", + ) + + def test_raises_when_dbt_project_yml_is_not_available( + self, dbt_project_path, mock_connect, runner + ): + dbt_file = dbt_project_path / "dbt_project.yml" + dbt_file.unlink() + + result = runner.invoke( + [ + "dbt", + "deploy", + "TEST_PIPELINE", + f"--source={dbt_project_path}", + ], + ) + + assert result.exit_code == 1, result.output + assert f"dbt_project.yml does not exist in directory" in result.output + assert mock_connect.mocked_ctx.get_query() == "" + + def test_raises_when_dbt_project_yml_does_not_specify_profile( + self, dbt_project_path, mock_connect, runner + ): + with open((dbt_project_path / "dbt_project.yml"), "w") as f: + yaml.dump({}, f) + + result = runner.invoke( + [ + "dbt", + "deploy", + "TEST_PIPELINE", + f"--source={dbt_project_path}", + ], + ) + + assert result.exit_code == 1, result.output + assert "`profile` is not defined in dbt_project.yml" in result.output + assert mock_connect.mocked_ctx.get_query() == "" + + def test_raises_when_profiles_yml_is_not_available( + self, dbt_project_path, mock_connect, runner + ): + (dbt_project_path / "profiles.yml").unlink() + + result = runner.invoke( + [ + "dbt", + "deploy", + "TEST_PIPELINE", + f"--source={dbt_project_path}", + ], + ) + + assert result.exit_code == 1, result.output + assert f"profiles.yml does not exist in directory" in result.output + assert mock_connect.mocked_ctx.get_query() == "" + + def test_raises_when_profiles_yml_does_not_contain_selected_profile( + self, dbt_project_path, mock_connect, runner + ): + with open((dbt_project_path / "profiles.yml"), "w") as f: + yaml.dump({}, f) + + result = runner.invoke( + [ + "dbt", + "deploy", + "TEST_PIPELINE", + f"--source={dbt_project_path}", + ], + ) + + assert result.exit_code == 1, result.output + assert "profile dev is not defined in profiles.yml" in result.output + assert mock_connect.mocked_ctx.get_query() == "" + + +class TestDBTExecute: + @pytest.mark.parametrize( + "args,expected_query", + [ + pytest.param( + [ + "dbt", + "execute", + "pipeline_name", + "test", + ], + "EXECUTE DBT PROJECT pipeline_name args='test'", + id="simple-command", + ), + pytest.param( + [ + "dbt", + "execute", 
+ "pipeline_name", + "run", + "-f", + "--select @source:snowplow,tag:nightly models/export", + ], + "EXECUTE DBT PROJECT pipeline_name args='run -f --select @source:snowplow,tag:nightly models/export'", + id="with-dbt-options", + ), + pytest.param( + ["dbt", "execute", "pipeline_name", "compile", "--vars '{foo:bar}'"], + "EXECUTE DBT PROJECT pipeline_name args='compile --vars '{foo:bar}''", + id="with-dbt-vars", + ), + pytest.param( + [ + "dbt", + "execute", + "pipeline_name", + "compile", + "--format=TXT", # collision with CLI's option; unsupported option + "-v", # collision with CLI's option + "-h", + "--debug", + "--info", + "--config-file=/", + ], + "EXECUTE DBT PROJECT pipeline_name args='compile --format=TXT -v -h --debug --info --config-file=/'", + id="with-dbt-conflicting-options", + ), + pytest.param( + [ + "dbt", + "execute", + "--format=JSON", + "pipeline_name", + "compile", + ], + "EXECUTE DBT PROJECT pipeline_name args='compile'", + id="with-cli-flag", + ), + ], + ) + def test_dbt_execute(self, mock_connect, mock_cursor, runner, args, expected_query): + cursor = mock_cursor( + rows=[(True, "very detailed logs")], + columns=[RESULT_COLUMN_NAME, OUTPUT_COLUMN_NAME], + ) + mock_connect.mocked_ctx.cs = cursor + + result = runner.invoke(args) + + assert result.exit_code == 0, result.output + assert mock_connect.mocked_ctx.kwargs[0]["_exec_async"] is False + assert mock_connect.mocked_ctx.get_query() == expected_query + + def test_execute_async(self, mock_connect, runner): + result = runner.invoke( + [ + "dbt", + "execute", + "--run-async", + "pipeline_name", + "compile", + ] + ) + + assert result.exit_code == 0, result.output + assert result.output.startswith("Command submitted") + assert mock_connect.mocked_ctx.kwargs[0]["_exec_async"] is True + assert ( + mock_connect.mocked_ctx.get_query() + == "EXECUTE DBT PROJECT pipeline_name args='compile'" + ) + + def test_dbt_execute_dbt_failure_returns_non_0_code( + self, mock_connect, mock_cursor, runner + ): + cursor = mock_cursor( + rows=[(False, "1 of 4 FAIL 1 not_null_my_first_dbt_model_id")], + columns=[RESULT_COLUMN_NAME, OUTPUT_COLUMN_NAME], + ) + mock_connect.mocked_ctx.cs = cursor + + result = runner.invoke( + [ + "dbt", + "execute", + "pipeline_name", + "test", + ] + ) + + assert result.exit_code == 1, result.output + assert "1 of 4 FAIL 1 not_null_my_first_dbt_model_id" in result.output + + def test_dbt_execute_malformed_server_response( + self, mock_connect, mock_cursor, runner + ): + cursor = mock_cursor( + rows=[(True, "very detailed logs")], + columns=["foo", "bar"], + ) + mock_connect.mocked_ctx.cs = cursor + + result = runner.invoke( + [ + "dbt", + "execute", + "pipeline_name", + "test", + ] + ) + + assert result.exit_code == 1, result.output + assert "Malformed server response" in result.output + + def test_dbt_execute_no_rows_in_response(self, mock_connect, mock_cursor, runner): + cursor = mock_cursor( + rows=[], + columns=[RESULT_COLUMN_NAME, OUTPUT_COLUMN_NAME], + ) + mock_connect.mocked_ctx.cs = cursor + + result = runner.invoke( + [ + "dbt", + "execute", + "pipeline_name", + "test", + ] + ) + + assert result.exit_code == 1, result.output + assert "No data returned from server" in result.output diff --git a/tests/dbt/test_manager.py b/tests/dbt/test_manager.py new file mode 100644 index 0000000000..7669b0e7a5 --- /dev/null +++ b/tests/dbt/test_manager.py @@ -0,0 +1,128 @@ +from textwrap import dedent + +import pytest +import yaml +from snowflake.cli._plugins.dbt.manager import DBTManager +from 
snowflake.cli.api.exceptions import CliError +from snowflake.cli.api.secure_path import SecurePath + + +class TestDeploy: + @pytest.fixture() + def profile(self): + return { + "dev": { + "outputs": { + "local": { + "account": "test_account", + "database": "testdb", + "role": "test_role", + "schema": "test_schema", + "threads": 4, + "type": "snowflake", + "user": "test_user", + "warehouse": "test_warehouse", + } + } + } + } + + @pytest.fixture + def project_path(self, tmp_path_factory): + source_path = tmp_path_factory.mktemp("dbt_project") + yield source_path + + def _generate_profile(self, project_path, profile): + dbt_profiles_file = project_path / "profiles.yml" + dbt_profiles_file.write_text(yaml.dump(profile)) + + def test_validate_profiles_raises_when_file_does_not_exist(self, project_path): + + with pytest.raises(CliError) as exc_info: + DBTManager._validate_profiles( # noqa: SLF001 + SecurePath(project_path), "dev" + ) + + assert ( + exc_info.value.message + == f"profiles.yml does not exist in directory {project_path.absolute()}." + ) + + def test_validate_profiles_raises_when_profile_is_not_in_the_file( + self, project_path, profile + ): + self._generate_profile(project_path, profile) + + with pytest.raises(CliError) as exc_info: + DBTManager._validate_profiles( # noqa: SLF001 + SecurePath(project_path), "another_profile_name" + ) + + assert ( + exc_info.value.message + == "profile another_profile_name is not defined in profiles.yml" + ) + + def test_validate_profiles_raises_when_extra_profiles_are_defined( + self, project_path, profile + ): + profile["another_profile"] = {} + self._generate_profile(project_path, profile) + + with pytest.raises(CliError) as exc_info: + DBTManager._validate_profiles( # noqa: SLF001 + SecurePath(project_path), "dev" + ) + + expected_error_message = """Found following errors in profiles.yml. Please fix them before proceeding: +another_profile + * Remove unnecessary profiles""" + assert exc_info.value.message == dedent(expected_error_message) + + def test_validate_profiles_raises_when_required_fields_are_missing( + self, project_path, profile + ): + profile["dev"]["outputs"]["local"].pop("warehouse", None) + profile["dev"]["outputs"]["local"].pop("role", None) + self._generate_profile(project_path, profile) + + with pytest.raises(CliError) as exc_info: + DBTManager._validate_profiles( # noqa: SLF001 + SecurePath(project_path), "dev" + ) + + expected_error_message = """Found following errors in profiles.yml. Please fix them before proceeding: +dev + * Missing required fields: role, warehouse in target local""" + assert exc_info.value.message == dedent(expected_error_message) + + def test_validate_profiles_raises_when_unsupported_fields_are_provided( + self, project_path, profile + ): + profile["dev"]["outputs"]["local"]["password"] = "very secret password" + self._generate_profile(project_path, profile) + + with pytest.raises(CliError) as exc_info: + DBTManager._validate_profiles( # noqa: SLF001 + SecurePath(project_path), "dev" + ) + + expected_error_message = """Found following errors in profiles.yml. 
Please fix them before proceeding: +dev + * Unsupported fields found: password in target local""" + assert exc_info.value.message == dedent(expected_error_message) + assert "very secret password" not in exc_info.value.message + + def test_validate_profiles_raises_when_type_is_wrong(self, project_path, profile): + profile["dev"]["outputs"]["local"]["type"] = "sqlite" + self._generate_profile(project_path, profile) + + with pytest.raises(CliError) as exc_info: + DBTManager._validate_profiles( # noqa: SLF001 + SecurePath(project_path), "dev" + ) + + expected_error_message = """Found following errors in profiles.yml. Please fix them before proceeding: +dev + * Value for type field is invalid. Should be set to `snowflake` in target local""" + assert exc_info.value.message == dedent(expected_error_message) diff --git a/tests/logs/__snapshots__/test_logs.ambr b/tests/logs/__snapshots__/test_logs.ambr index c01ff40ebd..89517c6405 100644 --- a/tests/logs/__snapshots__/test_logs.ambr +++ b/tests/logs/__snapshots__/test_logs.ambr @@ -10,7 +10,7 @@ value::string as log_message FROM SNOWFLAKE.TELEMETRY.EVENTS WHERE record_type = 'LOG' - AND (record:severity_text = 'INFO' or record:severity_text is NULL ) + AND (record:severity_text IN ('INFO', 'WARN', 'ERROR', 'FATAL') or record:severity_text is NULL ) AND object_name = 'bar' AND timestamp >= TO_TIMESTAMP_LTZ('2022-02-02T02:02:02') AND timestamp <= TO_TIMESTAMP_LTZ('2022-02-03T02:02:02') @@ -29,7 +29,7 @@ value::string as log_message FROM bar WHERE record_type = 'LOG' - AND (record:severity_text = 'INFO' or record:severity_text is NULL ) + AND (record:severity_text IN ('INFO', 'WARN', 'ERROR', 'FATAL') or record:severity_text is NULL ) AND object_name = 'bar' AND timestamp >= TO_TIMESTAMP_LTZ('2022-02-02T02:02:02') AND timestamp <= TO_TIMESTAMP_LTZ('2022-02-03T02:02:02') @@ -48,7 +48,7 @@ value::string as log_message FROM foo WHERE record_type = 'LOG' - AND (record:severity_text = 'INFO' or record:severity_text is NULL ) + AND (record:severity_text IN ('INFO', 'WARN', 'ERROR', 'FATAL') or record:severity_text is NULL ) AND object_name = 'bar' AND timestamp >= TO_TIMESTAMP_LTZ('2022-02-02T02:02:02') AND timestamp <= TO_TIMESTAMP_LTZ('2022-02-03T02:02:02') @@ -56,6 +56,14 @@ ORDER BY timestamp; ''' # --- +# name: test_if_incorrect_log_level_causes_error + ''' + +- Error ----------------------------------------------------------------------+ + | Invalid log level. 
Please choose from TRACE, DEBUG, INFO, WARN, ERROR, FATAL | + +------------------------------------------------------------------------------+ + + ''' +# --- # name: test_providing_time_in_incorrect_format_causes_error[2024-11-03 12:00:00 UTC---from] ''' +- Error ----------------------------------------------------------------------+ diff --git a/tests/logs/test_logs.py b/tests/logs/test_logs.py index f8da712c96..718c1a0c98 100644 --- a/tests/logs/test_logs.py +++ b/tests/logs/test_logs.py @@ -77,3 +77,17 @@ def test_correct_query_is_constructed(mock_connect, mock_ctx, runner, snapshot, queries = ctx.get_queries() assert len(queries) == 1 assert queries[0] == snapshot + + +def test_if_incorrect_log_level_causes_error(runner, snapshot): + result = runner.invoke( + [ + "logs", + "table", + "test_table", + "--log-level", + "NOTALEVEL", + ] + ) + assert result.exit_code == 1 + assert result.output == snapshot diff --git a/tests/logs/test_logs_manager.py b/tests/logs/test_logs_utils.py similarity index 68% rename from tests/logs/test_logs_manager.py rename to tests/logs/test_logs_utils.py index bf98494d54..d6df2eec77 100644 --- a/tests/logs/test_logs_manager.py +++ b/tests/logs/test_logs_utils.py @@ -5,7 +5,11 @@ from snowflake.cli._plugins.logs.commands import ( get_datetime_from_string, ) -from snowflake.cli._plugins.logs.manager import LogsManager +from snowflake.cli._plugins.logs.utils import ( + get_log_levels, + get_timestamp_query, + parse_log_levels_for_query, +) DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S" @@ -74,10 +78,45 @@ def test_if_passing_to_time_earlier_than_from_time_raiser_error(): to_time = from_time - timedelta(hours=1) with pytest.raises(ClickException) as e: - LogsManager()._get_timestamp_query(from_time=from_time, to_time=to_time) # noqa + get_timestamp_query(from_time=from_time, to_time=to_time) # noqa assert ( str(e.value) == "From_time cannot be later than to_time. 
Please check the values" ) assert e.value.exit_code == 1 + + +@pytest.mark.parametrize( + "log_level,expected", + [ + ("", ["INFO", "WARN", "ERROR", "FATAL"]), + ("TRACE", ["TRACE", "DEBUG", "INFO", "WARN", "ERROR", "FATAL"]), + ("DEBUG", ["DEBUG", "INFO", "WARN", "ERROR", "FATAL"]), + ("INFO", ["INFO", "WARN", "ERROR", "FATAL"]), + ("WARN", ["WARN", "ERROR", "FATAL"]), + ("ERROR", ["ERROR", "FATAL"]), + ("FATAL", ["FATAL"]), + ("fatal", ["FATAL"]), + ("eRrOr", ["ERROR", "FATAL"]), + ], +) +def test_if_log_levels_list_is_correctly_filtered(log_level, expected): + result = get_log_levels(log_level) + + assert result == expected + + +@pytest.mark.parametrize( + "level,expected", + [ + ("", "'INFO', 'WARN', 'ERROR', 'FATAL'"), + ("INFO", "'INFO', 'WARN', 'ERROR', 'FATAL'"), + ("DEBUG", "'DEBUG', 'INFO', 'WARN', 'ERROR', 'FATAL'"), + ("wArN", "'WARN', 'ERROR', 'FATAL'"), + ], +) +def test_if_log_level_gives_correct_query(level, expected): + result = parse_log_levels_for_query(level) + + assert result == expected diff --git a/tests/spcs/test_registry.py b/tests/spcs/test_registry.py index 816d1a7532..be8e14d06c 100644 --- a/tests/spcs/test_registry.py +++ b/tests/spcs/test_registry.py @@ -73,11 +73,40 @@ def test_get_registry_url(mock_execute, mock_conn, mock_cursor): ] mock_execute.return_value = mock_cursor( - rows=[{col: row for col, row in zip(MOCK_REPO_COLUMNS, mock_row)}], + rows=[{}, {col: row for col, row in zip(MOCK_REPO_COLUMNS, mock_row)}], columns=MOCK_REPO_COLUMNS, ) result = RegistryManager().get_registry_url() expected_query = "show image repositories in account" + assert mock_execute.call_count == 2 + mock_execute.assert_any_call(expected_query, cursor_class=DictCursor) + assert result == "orgname-alias.registry.snowflakecomputing.com" + + +@mock.patch("snowflake.cli._plugins.spcs.image_registry.manager.RegistryManager._conn") +@mock.patch( + "snowflake.cli._plugins.spcs.image_registry.manager.RegistryManager.execute_query" +) +def test_get_registry_url_with_schema_query(mock_execute, mock_conn, mock_cursor): + mock_row = [ + "2023-01-01 00:00:00", + "IMAGES", + "DB", + "SCHEMA", + "orgname-alias.registry.snowflakecomputing.com/DB/SCHEMA/IMAGES", + "TEST_ROLE", + "ROLE", + "", + ] + + mock_execute.return_value = mock_cursor( + rows=[{col: row for col, row in zip(MOCK_REPO_COLUMNS, mock_row)}], + columns=MOCK_REPO_COLUMNS, + ) + + result = RegistryManager().get_registry_url() + expected_query = "show image repositories in schema snowflake.images;" + mock_execute.assert_called_once_with(expected_query, cursor_class=DictCursor) assert result == "orgname-alias.registry.snowflakecomputing.com" @@ -88,14 +117,17 @@ def test_get_registry_url(mock_execute, mock_conn, mock_cursor): ) def test_get_registry_url_no_repositories(mock_execute, mock_conn, mock_cursor): mock_execute.return_value = mock_cursor( - rows=[], + rows=[{}, {}], columns=MOCK_REPO_COLUMNS, ) with pytest.raises(NoImageRepositoriesFoundError): RegistryManager().get_registry_url() - expected_query = "show image repositories in account" - mock_execute.assert_called_once_with(expected_query, cursor_class=DictCursor) + expected_query1 = "show image repositories in schema snowflake.images;" + expected_query2 = "show image repositories in account" + assert mock_execute.call_count == 2 + mock_execute.assert_any_call(expected_query1, cursor_class=DictCursor) + mock_execute.assert_any_call(expected_query2, cursor_class=DictCursor) @mock.patch( diff --git a/tests_integration/__snapshots__/test_dbt.ambr 
b/tests_integration/__snapshots__/test_dbt.ambr new file mode 100644 index 0000000000..531b4ec430 --- /dev/null +++ b/tests_integration/__snapshots__/test_dbt.ambr @@ -0,0 +1,11 @@ +# serializer version: 1 +# name: test_deploy_and_execute + ''' + ╭─ Error ──────────────────────────────────────────────────────────────────────╮ + │ Found following errors in profiles.yml. Please fix them before proceeding: │ + │ dbt_integration_project │ + │ * Unsupported fields found: password in target dev │ + ╰──────────────────────────────────────────────────────────────────────────────╯ + + ''' +# --- diff --git a/tests_integration/conftest.py b/tests_integration/conftest.py index b9bb16b0a2..6e22ea8701 100644 --- a/tests_integration/conftest.py +++ b/tests_integration/conftest.py @@ -185,6 +185,19 @@ def invoke_with_connection( ) -> CommandResult: return self.invoke_with_config([*args, "-c", connection], **kwargs) + def invoke_passthrough_with_connection( + self, + args, + connection: str = "integration", + passthrough_args: Optional[list[str]] = None, + **kwargs, + ) -> CommandResult: + if passthrough_args is None: + passthrough_args = list() + return self.invoke_with_config( + [*args, "-c", connection, *passthrough_args], **kwargs + ) + @pytest.fixture def runner(test_snowcli_config_provider, default_username, resource_suffix): diff --git a/tests_integration/test_data/projects/dbt_project/dbt_project.yml b/tests_integration/test_data/projects/dbt_project/dbt_project.yml new file mode 100644 index 0000000000..55a4e79ef0 --- /dev/null +++ b/tests_integration/test_data/projects/dbt_project/dbt_project.yml @@ -0,0 +1,20 @@ +name: 'dbt_integration_project' +version: '1.0.0' + +profile: 'dbt_integration_project' + +model-paths: ["models"] +analysis-paths: ["analyses"] +test-paths: ["tests"] +seed-paths: ["seeds"] +macro-paths: ["macros"] +snapshot-paths: ["snapshots"] + +clean-targets: + - "target" + - "dbt_packages" + +models: + dbt_integration_project: + example: + +materialized: view diff --git a/tests_integration/test_data/projects/dbt_project/models/example/my_first_dbt_model.sql b/tests_integration/test_data/projects/dbt_project/models/example/my_first_dbt_model.sql new file mode 100644 index 0000000000..983588f0eb --- /dev/null +++ b/tests_integration/test_data/projects/dbt_project/models/example/my_first_dbt_model.sql @@ -0,0 +1,11 @@ +{{ config(materialized='table') }} + +with source_data as ( + select 1 as id + union all + select null as id +) + +select * +from source_data +where id is not null diff --git a/tests_integration/test_data/projects/dbt_project/models/example/my_second_dbt_model.sql b/tests_integration/test_data/projects/dbt_project/models/example/my_second_dbt_model.sql new file mode 100644 index 0000000000..7e2c031f11 --- /dev/null +++ b/tests_integration/test_data/projects/dbt_project/models/example/my_second_dbt_model.sql @@ -0,0 +1,3 @@ +select * +from {{ ref('my_first_dbt_model') }} +where id = 1 diff --git a/tests_integration/test_data/projects/dbt_project/models/example/schema.yml b/tests_integration/test_data/projects/dbt_project/models/example/schema.yml new file mode 100644 index 0000000000..9730b7071b --- /dev/null +++ b/tests_integration/test_data/projects/dbt_project/models/example/schema.yml @@ -0,0 +1,21 @@ + +version: 2 + +models: + - name: my_first_dbt_model + description: "A starter dbt model" + columns: + - name: id + description: "The primary key for this table" + data_tests: + - unique + - not_null + + - name: my_second_dbt_model + description: "A starter dbt 
model" + columns: + - name: id + description: "The primary key for this table" + data_tests: + - unique + - not_null diff --git a/tests_integration/test_data/projects/dbt_project/profiles.yml b/tests_integration/test_data/projects/dbt_project/profiles.yml new file mode 100644 index 0000000000..1452b36448 --- /dev/null +++ b/tests_integration/test_data/projects/dbt_project/profiles.yml @@ -0,0 +1,6 @@ +dbt_integration_project: + target: dev + outputs: + dev: + type: snowflake + threads: 2 diff --git a/tests_integration/test_data/projects/snowpark_v2/c.py b/tests_integration/test_data/projects/snowpark_v2/c.py index 3ab4a6d6cc..14aed7edba 100644 --- a/tests_integration/test_data/projects/snowpark_v2/c.py +++ b/tests_integration/test_data/projects/snowpark_v2/c.py @@ -3,7 +3,14 @@ # test import import syrupy +import logging + +log = logging.getLogger("SnowCLI_Logs_Test") def hello_function(name: str) -> str: + log.debug("This is a debug message") + log.info("This is an info message") + log.warning("This is a warning message") + log.error("This is an error message") return f"Hello {name}!" diff --git a/tests_integration/test_dbt.py b/tests_integration/test_dbt.py new file mode 100644 index 0000000000..970da78ae0 --- /dev/null +++ b/tests_integration/test_dbt.py @@ -0,0 +1,166 @@ +# Copyright (c) 2025 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import datetime
+from pathlib import Path
+
+import pytest
+import yaml
+
+
+@pytest.mark.integration
+@pytest.mark.qa_only
+def test_deploy_and_execute(
+    runner,
+    snowflake_session,
+    test_database,
+    project_directory,
+    snapshot,
+):
+    with project_directory("dbt_project") as root_dir:
+        # Given a local dbt project
+        ts = int(datetime.datetime.now().timestamp())
+        name = f"dbt_project_{ts}"
+
+        # try to deploy, but fail since profiles.yml contains a password
+        _setup_dbt_profile(root_dir, snowflake_session, include_password=True)
+        result = runner.invoke_with_connection_json(["dbt", "deploy", name])
+        assert result.exit_code == 1, result.output
+        assert result.output == snapshot
+
+        # deploy for the first time
+        _setup_dbt_profile(root_dir, snowflake_session, include_password=False)
+        result = runner.invoke_with_connection_json(["dbt", "deploy", name])
+        assert result.exit_code == 0, result.output
+
+        # change location of profiles.yml and redeploy
+        new_profiles_directory = Path(root_dir) / "dbt_profiles"
+        new_profiles_directory.mkdir(parents=True, exist_ok=True)
+        profiles_file = root_dir / "profiles.yml"
+        profiles_file.rename(new_profiles_directory / "profiles.yml")
+
+        result = runner.invoke_with_connection_json(
+            [
+                "dbt",
+                "deploy",
+                name,
+                "--profiles-dir",
+                str(new_profiles_directory.resolve()),
+            ]
+        )
+        assert result.exit_code == 0, result.output
+
+        # list all dbt objects
+        result = runner.invoke_with_connection_json(
+            [
+                "dbt",
+                "list",
+                "--like",
+                name,
+            ]
+        )
+        assert result.exit_code == 0, result.output
+        assert len(result.json) == 1
+        dbt_object = result.json[0]
+        assert dbt_object["name"].lower() == name.lower()
+
+        # call `run` on dbt object
+        result = runner.invoke_passthrough_with_connection(
+            args=[
+                "dbt",
+                "execute",
+            ],
+            passthrough_args=[name, "run"],
+        )
+
+        # a successful execution should produce data in my_second_dbt_model
+        assert result.exit_code == 0, result.output
+        assert "Done. PASS=2 WARN=0 ERROR=0 SKIP=0 TOTAL=2" in result.output
+
+        result = runner.invoke_with_connection_json(
+            ["sql", "-q", "select count(*) as COUNT from my_second_dbt_model;"]
+        )
+        assert len(result.json) == 1, result.json
+        assert result.json[0]["COUNT"] == 1, result.json[0]
+
+
+@pytest.mark.integration
+@pytest.mark.qa_only
+def test_dbt_deploy_options(
+    runner,
+    snowflake_session,
+    test_database,
+    project_directory,
+):
+    with project_directory("dbt_project") as root_dir:
+        # Given a local dbt project
+        ts = int(datetime.datetime.now().timestamp())
+        name = f"dbt_project_{ts}"
+
+        # deploy for the first time - create new dbt object
+        _setup_dbt_profile(root_dir, snowflake_session, include_password=False)
+        result = runner.invoke_with_connection_json(["dbt", "deploy", name])
+        assert result.exit_code == 0, result.output
+
+        timestamp_after_create = _fetch_creation_date(name, runner)
+
+        # deploy for the second time - alter existing object
+        result = runner.invoke_with_connection_json(["dbt", "deploy", name])
+        assert result.exit_code == 0, result.output
+
+        timestamp_after_alter = _fetch_creation_date(name, runner)
+        assert (
+            timestamp_after_alter == timestamp_after_create
+        ), f"Timestamps differ: {timestamp_after_alter} vs {timestamp_after_create}"
+
+        # deploy for the third time - this time with --force flag to replace dbt object
+        result = runner.invoke_with_connection_json(["dbt", "deploy", name, "--force"])
+        assert result.exit_code == 0, result.output
+
+        timestamp_after_replace = _fetch_creation_date(name, runner)
+        assert (
+            timestamp_after_replace > timestamp_after_create
+        ), f"Timestamp did not increase: {timestamp_after_replace} vs {timestamp_after_create}"
+
+
+def _fetch_creation_date(name, runner) -> datetime.datetime:
+    result = runner.invoke_with_connection_json(
+        [
+            "dbt",
+            "list",
+            "--like",
+            name,
+        ]
+    )
+    assert result.exit_code == 0, result.output
+    assert len(result.json) == 1
+    dbt_object = result.json[0]
+    assert dbt_object["name"].lower() == name.lower()
+    return datetime.datetime.fromisoformat(dbt_object["created_on"])
+
+
+def _setup_dbt_profile(root_dir: Path, snowflake_session, include_password: bool):
+    with open((root_dir / "profiles.yml"), "r") as f:
+        profiles = yaml.safe_load(f)
+    dev_profile = profiles["dbt_integration_project"]["outputs"]["dev"]
+    dev_profile["database"] = snowflake_session.database
+    dev_profile["account"] = snowflake_session.account
+    dev_profile["user"] = snowflake_session.user
+    dev_profile["role"] = snowflake_session.role
+    dev_profile["warehouse"] = snowflake_session.warehouse
+    dev_profile["schema"] = snowflake_session.schema
+    if include_password:
+        dev_profile["password"] = "secret_phrase"
+    else:
+        dev_profile.pop("password", None)
+    (root_dir / "profiles.yml").write_text(yaml.dump(profiles))
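+
+# NOTE: `password` is only injected when include_password is set; the deploy
+# command rejects profiles.yml files containing unsupported fields such as
+# `password`, as asserted against the snapshot in test_deploy_and_execute.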