
Commit 95b6e0d

Merge pull request #100 from databrickslabs/cleanup-and-tests
[CLEANUP] Making VSCode highlight only new issues
2 parents (3f56ba9 + f57c17d), commit 95b6e0d

39 files changed: +376 −376 lines
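
Most of the changed lines visible below follow one mechanical pattern: inside map[string]*schema.Schema composite literals, the redundant &schema.Schema element type is dropped from each entry. That is the kind of redundancy gofmt -s and gopls's simplify-composite-literal diagnostic flag in VSCode, so cleaning it up across the tree is what lets the editor highlight only newly introduced issues. A minimal before/after sketch of the shape (import path assumed; the repository may pin a different Terraform SDK version):

package example

import "github.com/hashicorp/terraform-plugin-sdk/helper/schema"

// Before: the element type is spelled out on every entry, which the
// simplify-composite-literal check (gofmt -s / gopls) flags.
var verbose = map[string]*schema.Schema{
	"path": &schema.Schema{Type: schema.TypeString, Required: true},
}

// After: the map's value type already declares *schema.Schema, so each
// entry can use the elided form. Behavior is identical.
var simplified = map[string]*schema.Schema{
	"path": {Type: schema.TypeString, Required: true},
}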

databricks/azure_auth.go

Lines changed: 2 additions & 2 deletions
@@ -87,10 +87,10 @@ func (a *AzureAuth) getWorkspaceID(config *service.DBApiClientConfig) error {
 		"Authorization": "Bearer " + a.ManagementToken,
 	}
 	type apiVersion struct {
-		ApiVersion string `url:"api-version"`
+		APIVersion string `url:"api-version"`
 	}
 	uriPayload := apiVersion{
-		ApiVersion: "2018-04-01",
+		APIVersion: "2018-04-01",
 	}
 	var responseMap map[string]interface{}
 	resp, err := service.PerformQuery(config, http.MethodGet, url, "2.0", headers, false, true, uriPayload, nil)
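
The only change here is the identifier: ApiVersion becomes APIVersion, matching the Go convention (enforced by golint-style naming checks) that initialisms keep consistent case. The url:"api-version" struct tag is untouched, so the query parameter that service.PerformQuery ends up sending should be unaffected; only the Go-side name changes. A small illustration of the convention (names invented for this note, not taken from the repository):

package example

// Invented examples of the initialism naming convention the rename follows;
// a struct tag, when present, still controls any wire/serialized name.
type namingConvention struct {
	APIVersion string `url:"api-version"` // not ApiVersion
	HTTPStatus int    // not HttpStatus
	ID         string // not Id
	URL        string // not Url
}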

databricks/data_source_databricks_dbfs_file.go

Lines changed: 4 additions & 4 deletions
@@ -9,22 +9,22 @@ func dataSourceDBFSFile() *schema.Resource {
 	return &schema.Resource{
 		Read: dataSourceDBFSFileRead,
 		Schema: map[string]*schema.Schema{
-			"path": &schema.Schema{
+			"path": {
 				Type:     schema.TypeString,
 				Required: true,
 				ForceNew: true,
 			},
-			"limit_file_size": &schema.Schema{
+			"limit_file_size": {
 				Type:     schema.TypeBool,
 				Required: true,
 				ForceNew: true,
 			},
-			"content": &schema.Schema{
+			"content": {
 				Type:     schema.TypeString,
 				Computed: true,
 				ForceNew: true,
 			},
-			"file_size": &schema.Schema{
+			"file_size": {
 				Type:     schema.TypeInt,
 				Computed: true,
 			},

databricks/data_source_databricks_dbfs_file_paths.go

Lines changed: 3 additions & 3 deletions
@@ -9,17 +9,17 @@ func dataSourceDBFSFilePaths() *schema.Resource {
 	return &schema.Resource{
 		Read: dataSourceDBFSFilePathsRead,
 		Schema: map[string]*schema.Schema{
-			"path": &schema.Schema{
+			"path": {
 				Type:     schema.TypeString,
 				Required: true,
 				ForceNew: true,
 			},
-			"recursive": &schema.Schema{
+			"recursive": {
 				Type:     schema.TypeBool,
 				Required: true,
 				ForceNew: true,
 			},
-			"path_list": &schema.Schema{
+			"path_list": {
 				Type:     schema.TypeSet,
 				Computed: true,
 				Elem: &schema.Resource{

databricks/data_source_databricks_default_user_roles.go

Lines changed: 2 additions & 2 deletions
@@ -25,12 +25,12 @@ func dataSourceDefaultUserRoles() *schema.Resource {
 			return err
 		},
 		Schema: map[string]*schema.Schema{
-			"default_username": &schema.Schema{
+			"default_username": {
 				Type:     schema.TypeString,
 				Required: true,
 				ForceNew: true,
 			},
-			"roles": &schema.Schema{
+			"roles": {
 				Type:     schema.TypeList,
 				Computed: true,
 				Elem:     &schema.Schema{Type: schema.TypeString},

databricks/data_source_databricks_notebook.go

Lines changed: 6 additions & 6 deletions
@@ -12,12 +12,12 @@ func dataSourceNotebook() *schema.Resource {
 		Read: dataSourceNotebookRead,
 		Schema: map[string]*schema.Schema{

-			"path": &schema.Schema{
+			"path": {
 				Type:     schema.TypeString,
 				Required: true,
 				ForceNew: true,
 			},
-			"format": &schema.Schema{
+			"format": {
 				Type:     schema.TypeString,
 				Required: true,
 				ForceNew: true,
@@ -27,19 +27,19 @@ func dataSourceNotebook() *schema.Resource {
 					string(model.HTML),
 				}, false),
 			},
-			"content": &schema.Schema{
+			"content": {
 				Type:     schema.TypeString,
 				Computed: true,
 			},
-			"language": &schema.Schema{
+			"language": {
 				Type:     schema.TypeString,
 				Computed: true,
 			},
-			"object_type": &schema.Schema{
+			"object_type": {
 				Type:     schema.TypeString,
 				Computed: true,
 			},
-			"object_id": &schema.Schema{
+			"object_id": {
 				Type:     schema.TypeInt,
 				Computed: true,
 			},

databricks/data_source_databricks_notebook_paths.go

Lines changed: 3 additions & 3 deletions
@@ -12,17 +12,17 @@ func dataSourceNotebookPaths() *schema.Resource {
 		Read: dataSourceNotebookPathsRead,
 		Schema: map[string]*schema.Schema{

-			"path": &schema.Schema{
+			"path": {
 				Type:     schema.TypeString,
 				Required: true,
 				ForceNew: true,
 			},
-			"recursive": &schema.Schema{
+			"recursive": {
 				Type:     schema.TypeBool,
 				Required: true,
 				ForceNew: true,
 			},
-			"notebook_path_list": &schema.Schema{
+			"notebook_path_list": {
 				Type:     schema.TypeSet,
 				Computed: true,
 				Elem: &schema.Resource{

databricks/data_source_databricks_zones.go

Lines changed: 2 additions & 2 deletions
@@ -24,12 +24,12 @@ func dataSourceClusterZones() *schema.Resource {
 			return err
 		},
 		Schema: map[string]*schema.Schema{
-			"default_zone": &schema.Schema{
+			"default_zone": {
 				Type:     schema.TypeString,
 				Computed: true,
 				ForceNew: true,
 			},
-			"zones": &schema.Schema{
+			"zones": {
 				Type:     schema.TypeList,
 				Computed: true,
 				Elem:     &schema.Schema{Type: schema.TypeString},

databricks/mounts_test.go

Lines changed: 12 additions & 12 deletions
@@ -8,16 +8,16 @@ import (

 func TestValidateMountDirectory(t *testing.T) {
 	testCases := []struct {
-		directory  string
-		errorCount int
-	}{
-		{"", 0},
-		{"/directory", 0},
-		{"directory", 1},
-	}
-	for _, tc := range testCases {
-		_, errs := ValidateMountDirectory(tc.directory, "key")
-
-		assert.Lenf(t, errs, tc.errorCount, "directory '%s' does not generate the expected error count", tc.directory)
-	}
+		directory  string
+		errorCount int
+	}{
+		{"", 0},
+		{"/directory", 0},
+		{"directory", 1},
+	}
+	for _, tc := range testCases {
+		_, errs := ValidateMountDirectory(tc.directory, "key")
+
+		assert.Lenf(t, errs, tc.errorCount, "directory '%s' does not generate the expected error count", tc.directory)
+	}
 }
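
The removed and added lines above are textually identical, so this hunk appears to be an indentation/whitespace-only fix. The test itself is a table-driven check of ValidateMountDirectory, whose (value, key) -> (warnings, errors) shape matches Terraform's schema.SchemaValidateFunc. A hypothetical sketch of a validator that would satisfy the three cases in the table (the provider's real implementation may differ):

package databricks

import (
	"fmt"
	"strings"
)

// validateMountDirectorySketch is a hypothetical validator satisfying the
// table above: "" and "/directory" produce no errors, "directory" produces one.
// It is not the provider's implementation, only an illustration of the
// (value, key) -> (warnings, errors) contract the test exercises.
func validateMountDirectorySketch(v interface{}, k string) ([]string, []error) {
	directory, ok := v.(string)
	if !ok {
		return nil, []error{fmt.Errorf("%s must be a string", k)}
	}
	// Empty means "mount at the root"; anything else must be an absolute path.
	if directory != "" && !strings.HasPrefix(directory, "/") {
		return nil, []error{fmt.Errorf("%s must start with '/', got %q", k, directory)}
	}
	return nil, nil
}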

databricks/provider.go

Lines changed: 8 additions & 8 deletions
@@ -50,30 +50,30 @@ func Provider(version string) terraform.ResourceProvider {
 			"databricks_mws_workspaces": resourceMWSWorkspaces(),
 		},
 		Schema: map[string]*schema.Schema{
-			"host": &schema.Schema{
+			"host": {
 				Type:        schema.TypeString,
 				Optional:    true,
 				DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_HOST", nil),
 			},
-			"token": &schema.Schema{
+			"token": {
 				Type:          schema.TypeString,
 				Optional:      true,
 				Sensitive:     true,
 				DefaultFunc:   schema.EnvDefaultFunc("DATABRICKS_TOKEN", nil),
 				ConflictsWith: []string{"basic_auth"},
 			},
-			"basic_auth": &schema.Schema{
+			"basic_auth": {
 				Type:     schema.TypeList,
 				Optional: true,
 				MaxItems: 1,
 				Elem: &schema.Resource{
 					Schema: map[string]*schema.Schema{
-						"username": &schema.Schema{
+						"username": {
 							Type:        schema.TypeString,
 							Required:    true,
 							DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_USERNAME", nil),
 						},
-						"password": &schema.Schema{
+						"password": {
 							Type:      schema.TypeString,
 							Sensitive: true,
 							Required:  true,
@@ -83,7 +83,7 @@ func Provider(version string) terraform.ResourceProvider {
 				},
 				ConflictsWith: []string{"token"},
 			},
-			"config_file": &schema.Schema{
+			"config_file": {
 				Type:        schema.TypeString,
 				Optional:    true,
 				DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_CONFIG_FILE", "~/.databrickscfg"),
@@ -92,14 +92,14 @@ func Provider(version string) terraform.ResourceProvider {
 					"in ~/.databrickscfg. Check https://docs.databricks.com/dev-tools/cli/index.html#set-up-authentication for docs. Config\n" +
 					"file credetials will only be used when host/token are not provided.",
 			},
-			"profile": &schema.Schema{
+			"profile": {
 				Type:     schema.TypeString,
 				Optional: true,
 				Default:  "DEFAULT",
 				Description: "Connection profile specified within ~/.databrickscfg. Please check\n" +
 					"https://docs.databricks.com/dev-tools/cli/index.html#connection-profiles for documentation.",
 			},
-			"azure_auth": &schema.Schema{
+			"azure_auth": {
 				Type:     schema.TypeMap,
 				Optional: true,
 				Elem: &schema.Resource{
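
The provider block itself is unchanged apart from the literal simplification: authentication still comes from host/token, a basic_auth block (mutually exclusive with token), a config_file/profile pair pointing at ~/.databrickscfg, or azure_auth, with defaults pulled from DATABRICKS_* environment variables through schema.EnvDefaultFunc. A minimal sketch of what those DefaultFunc entries do (import path and host value assumed for illustration):

package main

import (
	"fmt"
	"os"

	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)

func main() {
	// Hypothetical value, set only to show the lookup order.
	os.Setenv("DATABRICKS_HOST", "https://example.cloud.databricks.com")

	// EnvDefaultFunc returns the environment variable's value when it is set,
	// otherwise the fallback passed as the second argument (nil here, which
	// Terraform treats as "no default").
	hostDefault := schema.EnvDefaultFunc("DATABRICKS_HOST", nil)
	tokenDefault := schema.EnvDefaultFunc("DATABRICKS_TOKEN", nil)

	host, _ := hostDefault()
	token, _ := tokenDefault()
	fmt.Println(host)  // https://example.cloud.databricks.com
	fmt.Println(token) // <nil> when DATABRICKS_TOKEN is unset
}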

databricks/resource_databricks_aws_s3_mount.go

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ func resourceAWSS3Mount() *schema.Resource {
 		Delete: resourceAWSS3Delete,

 		Schema: map[string]*schema.Schema{
-			"cluster_id": &schema.Schema{
+			"cluster_id": {
 				Type:     schema.TypeString,
 				Required: true,
 				ForceNew: true,
