From e715fe0aba6d15ec9e2041834ed962c3c415f567 Mon Sep 17 00:00:00 2001
From: morgan-dgk
Date: Thu, 23 Oct 2025 13:57:32 +1100
Subject: [PATCH] 🩹 Remove table_owner from text only cols
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

PR #1057 added several new column names to the `text_only_columns` arg of the
`table_from_rows` call in the `_catalog_filter_table` class method. This
method is called as part of the `dbt docs generate` task.

However, some of the new column names are not among the columns returned by
certain adapter implementations of `__get_catalog` and
`__get_catalog_relations`. For example, the relevant BigQuery and Athena
macros do not return a column named `table_owner` (see #1135). This results
in the agate type checker emitting a warning when type checking runs.

Any column added to the `text_only_columns` argument of the `table_from_rows`
call in the base implementation should presumably be one that is returned by
all adapter implementations.
---
 dbt-adapters/src/dbt/adapters/base/impl.py |  1 -
 .../src/dbt/adapters/snowflake/impl.py     | 21 ++++++++++++++++++++-
 2 files changed, 20 insertions(+), 2 deletions(-)

diff --git a/dbt-adapters/src/dbt/adapters/base/impl.py b/dbt-adapters/src/dbt/adapters/base/impl.py
index be9e709a6..9152510c8 100644
--- a/dbt-adapters/src/dbt/adapters/base/impl.py
+++ b/dbt-adapters/src/dbt/adapters/base/impl.py
@@ -1329,7 +1329,6 @@ def _catalog_filter_table(
                 "table_name",
                 "table_type",
                 "table_comment",
-                "table_owner",
                 "column_name",
                 "column_type",
                 "column_comment",
diff --git a/dbt-snowflake/src/dbt/adapters/snowflake/impl.py b/dbt-snowflake/src/dbt/adapters/snowflake/impl.py
index 563cc55ae..a255a7cf2 100644
--- a/dbt-snowflake/src/dbt/adapters/snowflake/impl.py
+++ b/dbt-snowflake/src/dbt/adapters/snowflake/impl.py
@@ -120,8 +120,27 @@ def _catalog_filter_table(
     ) -> "agate.Table":
         # On snowflake, users can set QUOTED_IDENTIFIERS_IGNORE_CASE, so force
         # the column names to their lowercased forms.
+        from dbt_common.clients.agate_helper import table_from_rows
+        from dbt.adapters.base.impl import _catalog_filter_schemas
+
         lowered = table.rename(column_names=[c.lower() for c in table.column_names])
-        return super()._catalog_filter_table(lowered, used_schemas)
+
+        table = table_from_rows(
+            lowered.rows,
+            lowered.column_names,
+            text_only_columns=[
+                "table_database",
+                "table_schema",
+                "table_name",
+                "table_type",
+                "table_owner",
+                "table_comment",
+                "column_name",
+                "column_type",
+                "column_comment",
+            ],
+        )
+        return table.where(_catalog_filter_schemas(used_schemas))
 
     def _make_match_kwargs(self, database, schema, identifier):
         # if any path part is already quoted then consider same casing but without quotes
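
Note (not part of the patch): a minimal sketch of the failure mode the commit
message describes, assuming only `agate` and `dbt_common` are installed. The
row data, database/table names, and the trimmed column list below are made up
for illustration; the point is that forcing a column via `text_only_columns`
that the catalog query never returns (here `table_owner`, as for the BigQuery
and Athena macros) is what surfaces the agate type-tester warning during
`dbt docs generate`.

    import warnings

    from dbt_common.clients.agate_helper import table_from_rows

    # Columns as a BigQuery/Athena-style catalog query might return them:
    # note there is no "table_owner" column in this result set.
    column_names = [
        "table_database",
        "table_schema",
        "table_name",
        "table_type",
        "table_comment",
        "column_name",
        "column_type",
        "column_comment",
    ]
    rows = [
        ["analytics", "public", "orders", "BASE TABLE", None, "order_id", "INT64", None],
    ]

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # "table_owner" is forced to text even though no such column exists in
        # the rows above; agate's type tester warns about the unknown column.
        table_from_rows(
            rows,
            column_names,
            text_only_columns=["table_database", "table_schema", "table_owner"],
        )

    for w in caught:
        print(w.category.__name__, w.message)

Keeping the base implementation's `text_only_columns` restricted to columns
every adapter returns, and moving `table_owner` into the Snowflake override,
avoids emitting that warning for adapters whose catalog macros omit the column.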