Commit 706e7b0

chore: simplify resource name
1 parent f5ad371 commit 706e7b0

9 files changed, +43 -43 lines changed


docs/data-sources/project_apikeys.md renamed to docs/data-sources/apikeys.md

Lines changed: 11 additions & 5 deletions
@@ -1,23 +1,29 @@
 ---
 # generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "supabase_project_apikeys Data Source - terraform-provider-supabase"
+page_title: "supabase_apikeys Data Source - terraform-provider-supabase"
 subcategory: ""
 description: |-
-  Project API Keys data source
+  API Keys data source
 ---
 
-# supabase_project_apikeys (Data Source)
+# supabase_apikeys (Data Source)
 
-Project API Keys data source
+API Keys data source
 
+## Example Usage
 
+```terraform
+data "supabase_apikeys" "production" {
+  project_ref = "mayuaycdtijbctgqbycg"
+}
+```
 
 <!-- schema generated by tfplugindocs -->
 ## Schema
 
 ### Required
 
-- `project_id` (String) Project identifier
+- `project_ref` (String) Project reference ID
 
 ### Read-Only
 
docs/schema.json

Lines changed: 1 addition & 1 deletion
@@ -289,7 +289,7 @@
          "description_kind": "markdown"
        }
      },
-     "supabase_project_apikeys": {
+     "supabase_apikeys": {
        "version": 0,
        "block": {
          "attributes": {

docs/tutorial.md

Lines changed: 5 additions & 11 deletions
@@ -42,19 +42,18 @@ resource "supabase_project" "production" {
   }
 }
 
-# Retrieve project API keys
-data "supabase_project_apikeys" "production" {
+# Retrieve project API keys (careful with sensitive data!)
+data "supabase_apikeys" "production" {
   project_ref = supabase_project.production.id
 }
 
-# Output the API keys (careful with sensitive data!)
 output "anon_key" {
-  value     = data.supabase_project_apikeys.production.anon_key
+  value     = data.supabase_apikeys.production.anon_key
   sensitive = true
 }
 
 output "service_role_key" {
-  value     = data.supabase_project_apikeys.production.service_role_key
+  value     = data.supabase_apikeys.production.service_role_key
   sensitive = true
 }
 ```
@@ -80,7 +79,7 @@ import {
   id = var.linked_project
 }
 
-# Create a project resource
+# Import a project resource
 resource "supabase_project" "production" {
   organization_id   = "<your-org-id>"
   name              = "tf-example"
@@ -91,11 +90,6 @@ resource "supabase_project" "production" {
     ignore_changes = [database_password]
   }
 }
-
-# Retrieve project API keys
-data "supabase_project_apikeys" "production" {
-  project_ref = supabase_project.production.id
-}
 ```
 
 Run `terraform -chdir=module apply`. Enter the ID of your Supabase project at the prompt. If your local TF state is empty, your project will be imported from remote rather than recreated.

examples/data-sources/supabase_apikeys/data-source.tf

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+data "supabase_apikeys" "production" {
+  project_ref = "mayuaycdtijbctgqbycg"
+}

examples/data-sources/supabase_project_apikeys/data-source.tf

Lines changed: 0 additions & 3 deletions
This file was deleted.

examples/examples.go

Lines changed: 2 additions & 2 deletions
@@ -13,6 +13,6 @@ var (
 	BranchDataSourceConfig string
 	//go:embed data-sources/supabase_pooler/data-source.tf
 	PoolerDataSourceConfig string
-	//go:embed data-sources/supabase_project_apikeys/data-source.tf
-	ProjectAPIKeysDataSourceConfig string
+	//go:embed data-sources/supabase_apikeys/data-source.tf
+	APIKeysDataSourceConfig string
 )

internal/provider/project_apikeys_data_source.go renamed to internal/provider/apikeys_data_source.go

Lines changed: 17 additions & 17 deletions
@@ -15,31 +15,31 @@ import (
 )
 
 // Ensure provider defined types fully satisfy framework interfaces.
-var _ datasource.DataSource = &ProjectAPIKeysDataSource{}
+var _ datasource.DataSource = &APIKeysDataSource{}
 
-func NewProjectAPIKeysDataSource() datasource.DataSource {
-	return &ProjectAPIKeysDataSource{}
+func NewAPIKeysDataSource() datasource.DataSource {
+	return &APIKeysDataSource{}
 }
 
-// ProjectAPIKeysDataSource defines the data source implementation.
-type ProjectAPIKeysDataSource struct {
+// APIKeysDataSource defines the data source implementation.
+type APIKeysDataSource struct {
 	client *api.ClientWithResponses
 }
 
-// ProjectAPIKeysDataSourceModel describes the data source data model.
-type ProjectAPIKeysDataSourceModel struct {
+// APIKeysDataSourceModel describes the data source data model.
+type APIKeysDataSourceModel struct {
 	ProjectRef     types.String `tfsdk:"project_ref"`
 	AnonKey        types.String `tfsdk:"anon_key"`
 	ServiceRoleKey types.String `tfsdk:"service_role_key"`
 }
 
-func (d *ProjectAPIKeysDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
-	resp.TypeName = req.ProviderTypeName + "_project_apikeys"
+func (d *APIKeysDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+	resp.TypeName = req.ProviderTypeName + "_apikeys"
 }
 
-func (d *ProjectAPIKeysDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+func (d *APIKeysDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
 	resp.Schema = schema.Schema{
-		MarkdownDescription: "Project API Keys data source",
+		MarkdownDescription: "API Keys data source",
 
 		Attributes: map[string]schema.Attribute{
 			"project_ref": schema.StringAttribute{
@@ -60,7 +60,7 @@ func (d *ProjectAPIKeysDataSource) Schema(ctx context.Context, req datasource.Sc
 	}
 }
 
-func (d *ProjectAPIKeysDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+func (d *APIKeysDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
 	// Prevent panic if the provider has not been configured.
 	if req.ProviderData == nil {
 		return
@@ -78,8 +78,8 @@ func (d *ProjectAPIKeysDataSource) Configure(ctx context.Context, req datasource
 	d.client = client
 }
 
-func (d *ProjectAPIKeysDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
-	var data ProjectAPIKeysDataSourceModel
+func (d *APIKeysDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+	var data APIKeysDataSourceModel
 
 	// Read Terraform configuration data into the model
 	resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
@@ -89,12 +89,12 @@ func (d *ProjectAPIKeysDataSource) Read(ctx context.Context, req datasource.Read
 
 	httpResp, err := d.client.V1GetProjectApiKeysWithResponse(ctx, data.ProjectRef.ValueString(), &api.V1GetProjectApiKeysParams{})
 	if err != nil {
-		resp.Diagnostics.AddError("Client Error", fmt.Sprintf("Unable to read project API keys, got error: %s", err))
+		resp.Diagnostics.AddError("Client Error", fmt.Sprintf("Unable to read API keys, got error: %s", err))
 		return
 	}
 
 	if httpResp.JSON200 == nil {
-		resp.Diagnostics.AddError("Client Error", fmt.Sprintf("Unable to read project API keys, got status %d: %s", httpResp.StatusCode(), httpResp.Body))
+		resp.Diagnostics.AddError("Client Error", fmt.Sprintf("Unable to read API keys, got status %d: %s", httpResp.StatusCode(), httpResp.Body))
 		return
 	}
 
@@ -107,7 +107,7 @@ func (d *ProjectAPIKeysDataSource) Read(ctx context.Context, req datasource.Read
 		}
 	}
 
-	tflog.Trace(ctx, "read project API keys")
+	tflog.Trace(ctx, "read API keys")
 
 	// Save data into Terraform state
 	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
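
The rename only touches the Go identifiers and the `_apikeys` suffix that `Metadata` appends to the provider type name; the schema, `Configure`, and `Read` logic are unchanged. A hypothetical unit test (not part of this commit) that pins the new Terraform type name could look like the following, assuming it sits alongside the data source in the `internal/provider` package:

```go
package provider

import (
	"context"
	"testing"

	"github.com/hashicorp/terraform-plugin-framework/datasource"
)

// Verifies that the data source now registers as "supabase_apikeys"
// rather than the old "supabase_project_apikeys".
func TestAPIKeysDataSourceTypeName(t *testing.T) {
	d := NewAPIKeysDataSource()

	req := datasource.MetadataRequest{ProviderTypeName: "supabase"}
	resp := datasource.MetadataResponse{}
	d.Metadata(context.Background(), req, &resp)

	if resp.TypeName != "supabase_apikeys" {
		t.Fatalf("unexpected data source type name: %q", resp.TypeName)
	}
}
```

Run with `go test ./internal/provider -run TestAPIKeysDataSourceTypeName`; it would fail if the suffix ever drifts from the documented `supabase_apikeys` name.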

internal/provider/project_apikeys_data_source_test.go renamed to internal/provider/apikeys_data_source_test.go

Lines changed: 3 additions & 3 deletions
@@ -37,10 +37,10 @@ func TestAccProjectAPIKeysDataSource(t *testing.T) {
 		Steps: []resource.TestStep{
 			// Read testing
 			{
-				Config: examples.ProjectAPIKeysDataSourceConfig,
+				Config: examples.APIKeysDataSourceConfig,
 				Check: resource.ComposeAggregateTestCheckFunc(
-					resource.TestCheckResourceAttr("data.supabase_project_apikeys.production", "anon_key", "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.anon"),
-					resource.TestCheckResourceAttr("data.supabase_project_apikeys.production", "service_role_key", "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.service_role"),
+					resource.TestCheckResourceAttr("data.supabase_apikeys.production", "anon_key", "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.anon"),
+					resource.TestCheckResourceAttr("data.supabase_apikeys.production", "service_role_key", "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.service_role"),
 				),
 			},
 		},

internal/provider/provider.go

Lines changed: 1 addition & 1 deletion
@@ -96,7 +96,7 @@ func (p *SupabaseProvider) DataSources(ctx context.Context) []func() datasource.
 	return []func() datasource.DataSource{
 		NewBranchDataSource,
 		NewPoolerDataSource,
-		NewProjectAPIKeysDataSource,
+		NewAPIKeysDataSource,
 	}
 }
 

0 commit comments
