diff --git a/web-common/src/features/templates/GroupedFieldsRenderer.svelte b/web-common/src/features/templates/GroupedFieldsRenderer.svelte
index f7fa5aabd1c..85fbc12a891 100644
--- a/web-common/src/features/templates/GroupedFieldsRenderer.svelte
+++ b/web-common/src/features/templates/GroupedFieldsRenderer.svelte
@@ -49,7 +49,6 @@
) => EnumOption[];
// Props for rich select support (nested ConnectionTypeSelector)
- export let disabledOptions: Record = {};
export let groupedFieldsMap: Map<
string,
Record
@@ -75,7 +74,7 @@
handleSelectChange(childKey, newValue)}
/>
@@ -91,7 +90,6 @@
{getTabFieldsForOption}
{tabGroupedFields}
{buildEnumOptions}
- {disabledOptions}
{groupedFieldsMap}
{getGroupedFieldsForOption}
{handleSelectChange}
diff --git a/web-common/src/features/templates/JSONSchemaFormRenderer.svelte b/web-common/src/features/templates/JSONSchemaFormRenderer.svelte
index 975c95a208f..cbada5580ec 100644
--- a/web-common/src/features/templates/JSONSchemaFormRenderer.svelte
+++ b/web-common/src/features/templates/JSONSchemaFormRenderer.svelte
@@ -26,9 +26,6 @@
// Icon mapping for select options
export let iconMap: Record> = {};
- // Map of option value → disabled reason for rich select options
- export let disabledOptions: Record = {};
-
// Use `any` for form values since field types are determined by JSON schema at runtime
type FormData = Record;
@@ -418,7 +415,7 @@
handleSelectChange(key, newValue)}
/>
@@ -434,7 +431,6 @@
{getTabFieldsForOption}
{tabGroupedFields}
buildEnumOptions={buildEnumOptionsWithIconMap}
- {disabledOptions}
groupedFieldsMap={groupedFields}
{getGroupedFieldsForOption}
{handleSelectChange}
@@ -467,7 +463,6 @@
{getTabFieldsForOption}
{tabGroupedFields}
buildEnumOptions={buildEnumOptionsWithIconMap}
- {disabledOptions}
groupedFieldsMap={groupedFields}
{getGroupedFieldsForOption}
{handleSelectChange}
@@ -497,7 +492,6 @@
{getTabFieldsForOption}
{tabGroupedFields}
buildEnumOptions={buildEnumOptionsWithIconMap}
- {disabledOptions}
groupedFieldsMap={groupedFields}
{getGroupedFieldsForOption}
{handleSelectChange}
@@ -526,7 +520,7 @@
handleSelectChange(childKey, newValue)}
@@ -546,7 +540,6 @@
{getTabFieldsForOption}
{tabGroupedFields}
buildEnumOptions={buildEnumOptionsWithIconMap}
- {disabledOptions}
groupedFieldsMap={groupedFields}
{getGroupedFieldsForOption}
{handleSelectChange}
@@ -583,7 +576,6 @@
{getTabFieldsForOption}
{tabGroupedFields}
buildEnumOptions={buildEnumOptionsWithIconMap}
- {disabledOptions}
groupedFieldsMap={groupedFields}
{getGroupedFieldsForOption}
{handleSelectChange}
diff --git a/web-common/src/features/templates/schemas/iceberg.ts b/web-common/src/features/templates/schemas/iceberg.ts
index e336618b055..a4611cafe1c 100644
--- a/web-common/src/features/templates/schemas/iceberg.ts
+++ b/web-common/src/features/templates/schemas/iceberg.ts
@@ -29,6 +29,11 @@ export const icebergSchema: MultiStepFormSchema = {
"Read Iceberg tables from Azure Blob Storage",
],
"x-ui-only": true,
+ "x-required-driver": {
+ gcs: "gcs",
+ s3: "s3",
+ azure: "azure",
+ },
"x-grouped-fields": {
gcs: ["gcs_info", "gcs_path"],
s3: ["s3_info", "s3_path"],
diff --git a/web-common/src/features/templates/schemas/types.ts b/web-common/src/features/templates/schemas/types.ts
index e4f71ac89b8..f85faea6067 100644
--- a/web-common/src/features/templates/schemas/types.ts
+++ b/web-common/src/features/templates/schemas/types.ts
@@ -66,6 +66,12 @@ export type JSONSchemaField = {
* Group fields under tab options for enum-driven tab layouts.
*/
"x-tab-group"?: Record;
+ /**
+ * Maps enum option values to required connector driver names.
+ * Options whose required driver is not found among existing connectors
+ * are rendered as disabled in ConnectionTypeSelector.
+ */
+ "x-required-driver"?: Record<string, string>;
/**
* Explicit environment variable name for secret fields.
* When set, this name is used instead of computing it from driver + property key.
From 669d1198c86fbf0a05962f778909997476803f1b Mon Sep 17 00:00:00 2001
From: royendo <67675319+royendo@users.noreply.github.com>
Date: Tue, 10 Mar 2026 10:16:54 -0400
Subject: [PATCH 16/19] web code qual
---
web-common/src/features/templates/GroupedFieldsRenderer.svelte | 1 -
1 file changed, 1 deletion(-)
diff --git a/web-common/src/features/templates/GroupedFieldsRenderer.svelte b/web-common/src/features/templates/GroupedFieldsRenderer.svelte
index 0b8fb79030e..e2095b04343 100644
--- a/web-common/src/features/templates/GroupedFieldsRenderer.svelte
+++ b/web-common/src/features/templates/GroupedFieldsRenderer.svelte
@@ -7,7 +7,6 @@
import ConnectionTypeSelector from "./ConnectionTypeSelector.svelte";
import {
type EnumOption,
- isRadioEnum,
isRichSelectEnum,
isSelectEnum,
isTabsEnum,
From bf407b6d86d1012cb96155c90c017bcc1903a121 Mon Sep 17 00:00:00 2001
From: royendo <67675319+royendo@users.noreply.github.com>
Date: Tue, 10 Mar 2026 15:08:44 -0400
Subject: [PATCH 17/19] as req
---
.../build/connectors/data-source/iceberg.md | 7 +-
.../reference/project-files/connectors.md | 80 -------------------
runtime/drivers/duckdb/duckdb.go | 2 -
runtime/parser/schema/project.schema.yaml | 76 +-----------------
.../resolvers/testdata/connector_iceberg.yaml | 17 +---
5 files changed, 9 insertions(+), 173 deletions(-)
diff --git a/docs/docs/developers/build/connectors/data-source/iceberg.md b/docs/docs/developers/build/connectors/data-source/iceberg.md
index be45e187b7b..85ac35732b7 100644
--- a/docs/docs/developers/build/connectors/data-source/iceberg.md
+++ b/docs/docs/developers/build/connectors/data-source/iceberg.md
@@ -20,7 +20,7 @@ Iceberg tables can be read from any of the following storage backends:
| Backend | URI format | Authentication |
|---|---|---|
| Amazon S3 | `s3://bucket/path/to/table` | Requires an [S3 connector](/developers/build/connectors/data-source/s3) |
-| Google Cloud Storage | `gs://bucket/path/to/table` | Requires a [GCS connector](/developers/build/connectors/data-source/gcs) |
+| Google Cloud Storage | `gs://bucket/path/to/table` | Requires a [GCS connector](/developers/build/connectors/data-source/gcs) with HMAC keys |
| Azure Blob Storage | `azure://container/path/to/table` | Requires an [Azure connector](/developers/build/connectors/data-source/azure) |
| Local filesystem | `/path/to/table` | No authentication needed |
@@ -58,6 +58,10 @@ sql: |
### Reading from GCS
+:::info HMAC keys required
+DuckDB's `iceberg_scan()` authenticates to GCS using HMAC keys, not JSON service account credentials. When configuring your [GCS connector](/developers/build/connectors/data-source/gcs), use the `key_id` and `secret` (HMAC) properties instead of `google_application_credentials`.
+:::
+
```yaml
type: model
connector: duckdb
@@ -126,4 +130,5 @@ Ensure your storage connector credentials are configured in your Rill Cloud proj
- **Direct file access only**: Rill reads Iceberg metadata and data files directly from storage. Catalog integrations (Hive Metastore, AWS Glue, REST catalog) are not supported.
- **DuckDB engine**: Iceberg support is currently provided through DuckDB's Iceberg extension. Additional engine support (e.g., ClickHouse) is planned.
+- **GCS requires HMAC keys**: DuckDB's `iceberg_scan()` only supports HMAC authentication for GCS, not JSON service account credentials.
- **Read-only**: Rill reads from Iceberg tables but does not write to them.
diff --git a/docs/docs/reference/project-files/connectors.md b/docs/docs/reference/project-files/connectors.md
index 213c94a8c09..00e1fe9881a 100644
--- a/docs/docs/reference/project-files/connectors.md
+++ b/docs/docs/reference/project-files/connectors.md
@@ -534,86 +534,6 @@ headers:
"Authorization": 'Bearer {{ .env.HTTPS_TOKEN }}' # HTTP headers to include in the request
```
-## Iceberg
-
-Apache Iceberg tables are read through DuckDB's `iceberg_scan()` function. Iceberg is not a standalone connector; instead, configure a model that uses DuckDB with `iceberg_scan()`. For cloud storage backends, a corresponding storage connector (S3, GCS, or Azure) must be configured with valid credentials. See the [Iceberg documentation](/developers/build/connectors/data-source/iceberg) for more details.
-
-
-### `driver`
-
-_[string]_ - Must be `duckdb`. Iceberg tables are read through DuckDB's native Iceberg extension. _(required)_
-
-### `sql`
-
-_[string]_ - SQL query using `iceberg_scan()` to read the Iceberg table. The function accepts the table path and optional parameters:
-- `allow_moved_paths` (boolean): Allow reading tables where data files have been moved from their original location.
-- `version` (string): Read a specific Iceberg snapshot version instead of the latest.
-
-
-### `create_secrets_from_connectors`
-
-_[string, array]_ - Storage connector name(s) to use for authentication when reading Iceberg tables from cloud storage (e.g., `s3`, `gcs`, `azure`).
-
-### `materialize`
-
-_[boolean]_ - Whether to materialize the model in the OLAP engine. Defaults to `true` for source models.
-
-```yaml
-# Example: Iceberg model reading from S3
-type: model
-connector: duckdb
-create_secrets_from_connectors: s3
-materialize: true
-sql: |
- SELECT *
- FROM iceberg_scan('s3://my-bucket/path/to/iceberg_table')
-```
-
-```yaml
-# Example: Iceberg model reading from GCS
-type: model
-connector: duckdb
-create_secrets_from_connectors: gcs
-materialize: true
-sql: |
- SELECT *
- FROM iceberg_scan('gs://my-bucket/path/to/iceberg_table')
-```
-
-```yaml
-# Example: Iceberg model reading from Azure
-type: model
-connector: duckdb
-create_secrets_from_connectors: azure
-materialize: true
-sql: |
- SELECT *
- FROM iceberg_scan('azure://my-container/path/to/iceberg_table')
-```
-
-```yaml
-# Example: Iceberg model reading from local filesystem
-type: model
-connector: duckdb
-materialize: true
-sql: |
- SELECT *
- FROM iceberg_scan('/path/to/iceberg_table')
-```
-
-```yaml
-# Example: Iceberg model with optional parameters
-type: model
-connector: duckdb
-create_secrets_from_connectors: s3
-materialize: true
-sql: |
- SELECT *
- FROM iceberg_scan('s3://my-bucket/path/to/iceberg_table',
- allow_moved_paths = true,
- version = '2')
-```
-
## MotherDuck
### `driver`
diff --git a/runtime/drivers/duckdb/duckdb.go b/runtime/drivers/duckdb/duckdb.go
index 1f0996be846..9e3e47de00f 100644
--- a/runtime/drivers/duckdb/duckdb.go
+++ b/runtime/drivers/duckdb/duckdb.go
@@ -550,14 +550,12 @@ func (c *connection) reopenDB(ctx context.Context) error {
"INSTALL 'parquet'",
"INSTALL 'httpfs'",
"INSTALL 'spatial'",
- "INSTALL 'iceberg'",
"LOAD 'json'",
"LOAD 'sqlite'",
"LOAD 'icu'",
"LOAD 'parquet'",
"LOAD 'httpfs'",
"LOAD 'spatial'",
- "LOAD 'iceberg'",
"SET GLOBAL timezone='UTC'",
"SET GLOBAL old_implicit_casting = true", // Implicit Cast to VARCHAR
)
diff --git a/runtime/parser/schema/project.schema.yaml b/runtime/parser/schema/project.schema.yaml
index 06c77c443c0..b99b076d21a 100644
--- a/runtime/parser/schema/project.schema.yaml
+++ b/runtime/parser/schema/project.schema.yaml
@@ -529,80 +529,8 @@ definitions:
required:
- driver
- path
- - type: object
- title: Iceberg
- description: |
- Apache Iceberg tables are read through DuckDB's `iceberg_scan()` function. Iceberg is not a standalone connector; instead, configure a model that uses DuckDB with `iceberg_scan()`. For cloud storage backends, a corresponding storage connector (S3, GCS, or Azure) must be configured with valid credentials. See the [Iceberg documentation](/developers/build/connectors/data-source/iceberg) for more details.
- properties:
- driver:
- type: string
- description: Must be `duckdb`. Iceberg tables are read through DuckDB's native Iceberg extension.
- const: duckdb
- sql:
- type: string
- description: |
- SQL query using `iceberg_scan()` to read the Iceberg table. The function accepts the table path and optional parameters:
- - `allow_moved_paths` (boolean): Allow reading tables where data files have been moved from their original location.
- - `version` (string): Read a specific Iceberg snapshot version instead of the latest.
- create_secrets_from_connectors:
- type:
- - string
- - array
- items:
- type: string
- description: Storage connector name(s) to use for authentication when reading Iceberg tables from cloud storage (e.g., `s3`, `gcs`, `azure`).
- materialize:
- type: boolean
- description: Whether to materialize the model in the OLAP engine. Defaults to `true` for source models.
- examples:
- - # Example: Iceberg model reading from S3
- type: model
- connector: duckdb
- create_secrets_from_connectors: s3
- materialize: true
-
- sql: |
- SELECT *
- FROM iceberg_scan('s3://my-bucket/path/to/iceberg_table')
- - # Example: Iceberg model reading from GCS
- type: model
- connector: duckdb
- create_secrets_from_connectors: gcs
- materialize: true
-
- sql: |
- SELECT *
- FROM iceberg_scan('gs://my-bucket/path/to/iceberg_table')
- - # Example: Iceberg model reading from Azure
- type: model
- connector: duckdb
- create_secrets_from_connectors: azure
- materialize: true
-
- sql: |
- SELECT *
- FROM iceberg_scan('azure://my-container/path/to/iceberg_table')
- - # Example: Iceberg model reading from local filesystem
- type: model
- connector: duckdb
- materialize: true
-
- sql: |
- SELECT *
- FROM iceberg_scan('/path/to/iceberg_table')
- - # Example: Iceberg model with optional parameters
- type: model
- connector: duckdb
- create_secrets_from_connectors: s3
- materialize: true
-
- sql: |
- SELECT *
- FROM iceberg_scan('s3://my-bucket/path/to/iceberg_table',
- allow_moved_paths = true,
- version = '2')
- required:
- - driver
+ # Note: Iceberg is not a standalone connector. It uses DuckDB's iceberg_scan() function.
+ # See /developers/build/connectors/data-source/iceberg for configuration details.
- type: object
title: MotherDuck
properties:
diff --git a/runtime/resolvers/testdata/connector_iceberg.yaml b/runtime/resolvers/testdata/connector_iceberg.yaml
index 8cb32458ac2..a83b58581db 100644
--- a/runtime/resolvers/testdata/connector_iceberg.yaml
+++ b/runtime/resolvers/testdata/connector_iceberg.yaml
@@ -2,14 +2,6 @@ expensive: true
connectors:
- gcs
project_files:
- iceberg_gcs_public.yaml:
- type: model
- connector: duckdb
- materialize: true
- sql: |
- SELECT *
- FROM iceberg_scan('gs://rilldata-public/iceberg/lineitem_iceberg',
- allow_moved_paths = true)
iceberg_gcs_with_secrets.yaml:
type: model
connector: duckdb
@@ -17,7 +9,7 @@ project_files:
materialize: true
sql: |
SELECT *
- FROM iceberg_scan('gs://rilldata-public/iceberg/lineitem_iceberg',
+ FROM iceberg_scan('gs://integration-test.rilldata.com/iceberg/lineitem_iceberg',
allow_moved_paths = true)
tests:
- name: test_iceberg_gcs_with_secrets_row_count
@@ -49,10 +41,3 @@ tests:
l_shipinstruct,VARCHAR,YES,,,
l_shipmode,VARCHAR,YES,,,
l_comment,VARCHAR,YES,,,
- - name: test_iceberg_gcs_public_row_count
- resolver: sql
- properties:
- sql: "select count(*) as count from iceberg_gcs_public"
- result_csv: |
- count
- 51793
From 910d7f3e6dea16b6d237a1e2588018574fe0f719 Mon Sep 17 00:00:00 2001
From: royendo <67675319+royendo@users.noreply.github.com>
Date: Tue, 10 Mar 2026 16:41:14 -0400
Subject: [PATCH 18/19] also removing other "models" from connector
---
.../developers/build/connectors/connectors.md | 3 -
.../connectors/data-source/data-source.md | 12 ++-
.../reference/project-files/connectors.md | 55 -------------
docs/docs/reference/project-files/models.md | 14 ----
runtime/parser/schema/project.schema.yaml | 78 -------------------
5 files changed, 9 insertions(+), 153 deletions(-)
diff --git a/docs/docs/developers/build/connectors/connectors.md b/docs/docs/developers/build/connectors/connectors.md
index 1e14910ba86..e25abe06687 100644
--- a/docs/docs/developers/build/connectors/connectors.md
+++ b/docs/docs/developers/build/connectors/connectors.md
@@ -164,7 +164,6 @@ Rill is continually evaluating additional OLAP engines that can be added. For a
content="Connect to SQLite databases for lightweight, file-based data storage and querying."
link="/developers/build/connectors/data-source/sqlite"
linkLabel="Learn more"
- referenceLink="sqlite"
/>
}
@@ -226,7 +225,6 @@ Rill is continually evaluating additional OLAP engines that can be added. For a
content="Read Iceberg tables directly from object storage through compatible query engines."
link="/developers/build/connectors/data-source/iceberg"
linkLabel="Learn more"
- referenceLink="iceberg"
/>
@@ -276,7 +274,6 @@ Rill is continually evaluating additional OLAP engines that can be added. For a
content="Connect to Salesforce to extract data from objects and queries using the Salesforce API."
link="/developers/build/connectors/data-source/salesforce"
linkLabel="Learn more"
- referenceLink="salesforce"
/>
diff --git a/docs/docs/developers/build/connectors/data-source/data-source.md b/docs/docs/developers/build/connectors/data-source/data-source.md
index 7373bb72a34..5c760e32935 100644
--- a/docs/docs/developers/build/connectors/data-source/data-source.md
+++ b/docs/docs/developers/build/connectors/data-source/data-source.md
@@ -84,6 +84,7 @@ Rill supports connecting your data to both [DuckDB](/developers/build/connectors
### MySQL
### PostgreSQL
### SQLite
+### Supabase
+ }
+ header="Supabase"
+ content="Connect to Supabase's managed PostgreSQL databases with SSL support and standard connection methods."
+ link="/developers/build/connectors/data-source/supabase"
+ linkLabel="Learn more"
+ referenceLink="supabase"
/>
@@ -158,7 +166,6 @@ Rill supports connecting your data to both [DuckDB](/developers/build/connectors
content="Read Iceberg tables directly from object storage through compatible query engines."
link="/developers/build/connectors/data-source/iceberg"
linkLabel="Learn more"
- referenceLink="iceberg"
/>
@@ -208,7 +215,6 @@ Rill supports connecting your data to both [DuckDB](/developers/build/connectors
content="Connect to Salesforce to extract data from objects and queries using the Salesforce API."
link="/developers/build/connectors/data-source/salesforce"
linkLabel="Learn more"
- referenceLink="salesforce"
/>
diff --git a/docs/docs/reference/project-files/connectors.md b/docs/docs/reference/project-files/connectors.md
index 00e1fe9881a..13aa17b5887 100644
--- a/docs/docs/reference/project-files/connectors.md
+++ b/docs/docs/reference/project-files/connectors.md
@@ -26,7 +26,6 @@ Connector YAML files define how Rill connects to external data sources and OLAP
### _Databases_
- [**MySQL**](#mysql) - MySQL databases
- [**PostgreSQL**](#postgres) - PostgreSQL databases
-- [**SQLite**](#sqlite) - SQLite databases
- [**Supabase**](#supabase) - Supabase (managed PostgreSQL)
### _Object Storage_
@@ -45,7 +44,6 @@ Connector YAML files define how Rill connects to external data sources and OLAP
### _Other_
- [**HTTPS**](#https) - Public files via HTTP/HTTPS
-- [**Salesforce**](#salesforce) - Salesforce data
:::warning Security Recommendation
For all credential parameters (passwords, tokens, keys), use environment variables with the syntax `{{ .env.KEY_NAME }}`. This keeps sensitive data out of your YAML files and version control. See our [credentials documentation](/developers/build/connectors/credentials/) for complete setup instructions.
@@ -1130,43 +1128,6 @@ endpoint: "https://my-s3-endpoint.com" # Optional custom endpoint URL for S3-com
region: "us-east-1" # AWS region of the S3 bucket
```
-## Salesforce
-
-### `driver`
-
-_[string]_ - Refers to the driver type and must be driver `salesforce` _(required)_
-
-### `username`
-
-_[string]_ - Salesforce account username _(required)_
-
-### `password`
-
-_[string]_ - Salesforce account password (secret)
-
-### `key`
-
-_[string]_ - Authentication key for Salesforce (secret)
-
-### `endpoint`
-
-_[string]_ - Salesforce API endpoint URL _(required)_
-
-### `client_id`
-
-_[string]_ - Client ID used for Salesforce OAuth authentication _(required)_
-
-```yaml
-# Example: Salesforce connector configuration
-type: connector # Must be `connector` (required)
-driver: salesforce # Must be `salesforce` _(required)_
-username: "myusername" # Salesforce account username
-password: "{{ .env.SALESFORCE_PASSWORD }}" # Salesforce account password (secret)
-key: "{{ .env.SALESFORCE_KEY }}" # Authentication key for Salesforce (secret)
-endpoint: "https://login.salesforce.com" # Salesforce API endpoint URL
-client_id: "my-client-id" # Client ID used for Salesforce OAuth authentication
-```
-
## Slack
### `driver`
@@ -1280,19 +1241,3 @@ dsn: "{{ .env.SNOWFLAKE_DSN }}" # define SNOWFLAKE_DSN in .env file
parallel_fetch_limit: 2
```
-## SQLite
-
-### `driver`
-
-_[string]_ - Refers to the driver type and must be driver `sqlite` _(required)_
-
-### `dsn`
-
-_[string]_ - DSN(Data Source Name) for the sqlite connection _(required)_
-
-```yaml
-# Example: SQLite connector configuration
-type: connector # Must be `connector` (required)
-driver: sqlite # Must be `sqlite` _(required)_
-dsn: "file:mydatabase.db" # DSN for the sqlite connection
-```
\ No newline at end of file
diff --git a/docs/docs/reference/project-files/models.md b/docs/docs/reference/project-files/models.md
index 9b7aa5efeab..f087719270e 100644
--- a/docs/docs/reference/project-files/models.md
+++ b/docs/docs/reference/project-files/models.md
@@ -673,20 +673,6 @@ _[object]_ - Settings related to glob file matching.
_[string]_ - Size of a batch (e.g., '100MB')
-## Additional properties when `connector` is `salesforce` or [named connector](./connectors#salesforce) of salesforce
-
-### `soql`
-
-_[string]_ - SOQL query to execute against the Salesforce instance.
-
-### `sobject`
-
-_[string]_ - Salesforce object (e.g., Account, Contact) targeted by the query.
-
-### `queryAll`
-
-_[boolean]_ - Whether to include deleted and archived records in the query (uses queryAll API).
-
## Examples
```yaml
diff --git a/runtime/parser/schema/project.schema.yaml b/runtime/parser/schema/project.schema.yaml
index b99b076d21a..69fd252efcf 100644
--- a/runtime/parser/schema/project.schema.yaml
+++ b/runtime/parser/schema/project.schema.yaml
@@ -63,7 +63,6 @@ definitions:
### _Databases_
- [**MySQL**](#mysql) - MySQL databases
- [**PostgreSQL**](#postgres) - PostgreSQL databases
- - [**SQLite**](#sqlite) - SQLite databases
- [**Supabase**](#supabase) - Supabase (managed PostgreSQL)
### _Object Storage_
@@ -82,7 +81,6 @@ definitions:
### _Other_
- [**HTTPS**](#https) - Public files via HTTP/HTTPS
- - [**Salesforce**](#salesforce) - Salesforce data
:::warning Security Recommendation
For all credential parameters (passwords, tokens, keys), use environment variables with the syntax `{{ .env.KEY_NAME }}`. This keeps sensitive data out of your YAML files and version control. See our [credentials documentation](/developers/build/connectors/credentials/) for complete setup instructions.
@@ -1062,42 +1060,6 @@ definitions:
required:
- driver
- bucket
- - type: object
- title: Salesforce
- properties:
- driver:
- type: string
- description: Refers to the driver type and must be driver `salesforce`
- username:
- type: string
- description: Salesforce account username
- password:
- type: string
- description: Salesforce account password (secret)
- key:
- type: string
- description: Authentication key for Salesforce (secret)
- endpoint:
- type: string
- description: Salesforce API endpoint URL
- client_id:
- type: string
- description: Client ID used for Salesforce OAuth authentication
- examples:
- - # Example: Salesforce connector configuration
- type: connector # Must be `connector` (required)
- driver: salesforce # Must be `salesforce` _(required)_
-
- username: "myusername" # Salesforce account username
- password: "{{ .env.SALESFORCE_PASSWORD }}" # Salesforce account password (secret)
- key: "{{ .env.SALESFORCE_KEY }}" # Authentication key for Salesforce (secret)
- endpoint: "https://login.salesforce.com" # Salesforce API endpoint URL
- client_id: "my-client-id" # Client ID used for Salesforce OAuth authentication
- required:
- - driver
- - username
- - endpoint
- - client_id
- type: object
title: Slack
properties:
@@ -1202,24 +1164,6 @@ definitions:
required:
- type
- driver
- - type: object
- title: SQLite
- properties:
- driver:
- type: string
- description: Refers to the driver type and must be driver `sqlite`
- dsn:
- type: string
- description: DSN(Data Source Name) for the sqlite connection
- examples:
- - # Example: SQLite connector configuration
- type: connector # Must be `connector` (required)
- driver: sqlite # Must be `sqlite` _(required)_
-
- dsn: "file:mydatabase.db" # DSN for the sqlite connection
- required:
- - driver
- - dsn
# Source YAML
@@ -1257,7 +1201,6 @@ definitions:
- redshift
- postgres
- supabase
- - sqlite
- snowflake
- bigquery
- duckdb
@@ -1734,15 +1677,6 @@ definitions:
- connector
then:
$ref: '#/definitions/models/definitions/s3'
- - if:
- title: Additional properties when `connector` is `salesforce` or [named connector](./connectors#salesforce) of salesforce
- properties:
- connector:
- const: salesforce
- required:
- - connector
- then:
- $ref: '#/definitions/models/definitions/salesforce'
definitions:
athena:
type: object
@@ -1924,18 +1858,6 @@ definitions:
batch_size:
type: string
description: 'Size of a batch (e.g., ''100MB'')'
- salesforce:
- type: object
- properties:
- soql:
- type: string
- description: SOQL query to execute against the Salesforce instance.
- sobject:
- type: string
- description: Salesforce object (e.g., Account, Contact) targeted by the query.
- queryAll:
- type: boolean
- description: Whether to include deleted and archived records in the query (uses queryAll API).
examples:
- ### Incremental model
type: model
From 1f6c6fcf0abc17e1b044ee38ec8a6a1a5e9f1e15 Mon Sep 17 00:00:00 2001
From: royendo <67675319+royendo@users.noreply.github.com>
Date: Tue, 10 Mar 2026 16:45:12 -0400
Subject: [PATCH 19/19] clean up
---
.../build/connectors/data-source/salesforce.md | 4 ++--
.../build/connectors/data-source/sqlite.md | 17 +++++++++--------
docs/docs/reference/project-files/connectors.md | 6 +-----
runtime/parser/schema/project.schema.yaml | 3 ---
4 files changed, 12 insertions(+), 18 deletions(-)
diff --git a/docs/docs/developers/build/connectors/data-source/salesforce.md b/docs/docs/developers/build/connectors/data-source/salesforce.md
index 2937107526f..b96148d51e2 100644
--- a/docs/docs/developers/build/connectors/data-source/salesforce.md
+++ b/docs/docs/developers/build/connectors/data-source/salesforce.md
@@ -14,7 +14,7 @@ sidebar_position: 65
## Local credentials
-When using Rill Developer on your local machine, you will need to provide your credentials via a connector file. We would recommend not using plain text to create your file and instead use the `.env` file. For more details on your connector, see [connector YAML](/reference/project-files/connectors#salesforce) for more details.
+When using Rill Developer on your local machine, you will need to provide your credentials via a connector file. We would recommend not using plain text to create your file and instead use the `.env` file. For more details on your connector, see [connector YAML](/reference/project-files/connectors).
:::tip Updating the project environmental variable
@@ -44,7 +44,7 @@ If this project has already been deployed to Rill Cloud and credentials have bee
## Deploy to Rill Cloud
-When deploying a project to Rill Cloud, Rill requires you to explicitly provide Salesforce credentials used in your project. Please refer to our [connector YAML reference docs](/reference/project-files/connectors#salesforce) for more information.
+When deploying a project to Rill Cloud, Rill requires you to explicitly provide Salesforce credentials used in your project. Please refer to our [connector YAML reference docs](/reference/project-files/connectors) for more information.
If you subsequently add sources that require new credentials (or if you simply entered the wrong credentials during the initial deploy), you can update the credentials by pushing the `Deploy` button to update your project or by running the following command in the CLI:
```
diff --git a/docs/docs/developers/build/connectors/data-source/sqlite.md b/docs/docs/developers/build/connectors/data-source/sqlite.md
index 2e37886780a..eef801cbd93 100644
--- a/docs/docs/developers/build/connectors/data-source/sqlite.md
+++ b/docs/docs/developers/build/connectors/data-source/sqlite.md
@@ -13,22 +13,23 @@ sidebar_position: 80
## Connect to SQLite
+SQLite databases are read through DuckDB's [SQLite extension](https://duckdb.org/docs/extensions/sqlite.html) using the `sqlite_scan()` function. No separate connector is needed.
-In many cases, since SQLite is used as an in-process database, credentials are not required. Instead, Rill will need to know the path to the SQLite database file so that it can be read accordingly.
+Create a model file (e.g., `models/my_sqlite_data.yaml`):
```yaml
-type: connector
-driver: sqlite
+type: model
+connector: duckdb
+materialize: true
-dsn: "file:mydatabase.db"
+sql: |
+ SELECT *
+ FROM sqlite_scan('data/mydatabase.db', 'my_table')
```
-Alternatively, you can create the connector directly using the [connector YAML reference documentation](/reference/project-files/connectors#sqlite).
-
-
:::tip
-If you plan to deploy the project to Rill Cloud, it is recommended that you move the SQLite database file to a `data` folder in your Rill project home directory. You can then use the relative path of the db file in your source definition (e.g., `data/test_sqlite.db`).
+If you plan to deploy the project to Rill Cloud, place the SQLite database file in a `data` folder in your Rill project directory and use the relative path (e.g., `data/mydatabase.db`).
:::
diff --git a/docs/docs/reference/project-files/connectors.md b/docs/docs/reference/project-files/connectors.md
index 13aa17b5887..9ce32f863bb 100644
--- a/docs/docs/reference/project-files/connectors.md
+++ b/docs/docs/reference/project-files/connectors.md
@@ -39,9 +39,6 @@ Connector YAML files define how Rill connects to external data sources and OLAP
- [**Gemini**](#gemini) - Gemini connector for chat with your own API key
- [**Slack**](#slack) - Slack data
-### _Table Formats_
-- [**Iceberg**](#iceberg) - Apache Iceberg tables via DuckDB
-
### _Other_
- [**HTTPS**](#https) - Public files via HTTP/HTTPS
@@ -1239,5 +1236,4 @@ type: connector
driver: snowflake
dsn: "{{ .env.SNOWFLAKE_DSN }}" # define SNOWFLAKE_DSN in .env file
parallel_fetch_limit: 2
-```
-
+```
\ No newline at end of file
diff --git a/runtime/parser/schema/project.schema.yaml b/runtime/parser/schema/project.schema.yaml
index 69fd252efcf..99c79370d02 100644
--- a/runtime/parser/schema/project.schema.yaml
+++ b/runtime/parser/schema/project.schema.yaml
@@ -76,9 +76,6 @@ definitions:
- [**Gemini**](#gemini) - Gemini connector for chat with your own API key
- [**Slack**](#slack) - Slack data
- ### _Table Formats_
- - [**Iceberg**](#iceberg) - Apache Iceberg tables via DuckDB
-
### _Other_
- [**HTTPS**](#https) - Public files via HTTP/HTTPS