diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml new file mode 100644 index 000000000..0343b7075 --- /dev/null +++ b/.github/workflows/golangci-lint.yml @@ -0,0 +1,24 @@ +name: golangci-lint +on: + push: + branches: + - main + - master + pull_request: + +permissions: + contents: read + +jobs: + golangci: + name: lint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: stable + - name: golangci-lint + uses: golangci/golangci-lint-action@v8 + with: + version: v2.1 \ No newline at end of file diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 000000000..055d4ebb2 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,213 @@ +version: "2" +linters: + default: none + enable: + - asciicheck + - bidichk + - bodyclose + - dogsled + - dupl + - errcheck + - errname + - errorlint + - exhaustruct + - forcetypeassert + - gocognit + - gocritic + - godot + - gomodguard + - gosec + - govet + - importas + - ineffassign + - makezero + - misspell + - nestif + - nilnil + # - noctx + # - paralleltest + - revive + - staticcheck + # - tparallel + - unconvert + - unused + settings: + dupl: + threshold: 412 + godot: + scope: all + capital: true + exhaustruct: + include: + - httpmw\.\w+ + - github.com/coder/coder/v2/coderd/database\.[^G][^e][^t]\w+Params + gocognit: + min-complexity: 300 + goconst: + min-len: 4 + min-occurrences: 3 + gocritic: + enabled-checks: + - badLock + - badRegexp + - boolExprSimplify + - builtinShadow + - builtinShadowDecl + - commentedOutImport + - deferUnlambda + - dupImport + - dynamicFmtString + - emptyDecl + - emptyFallthrough + - emptyStringTest + - evalOrder + - externalErrorReassign + - filepathJoin + - hexLiteral + - httpNoBody + - importShadow + - indexAlloc + - initClause + - methodExprCall + - nestingReduce + - nilValReturn + - preferFilepathJoin + - rangeAppendAll + - regexpPattern + - redundantSprint + - regexpSimplify + - ruleguard + - 
sliceClear + - sortSlice + - sprintfQuotedString + - sqlQuery + - stringConcatSimplify + - stringXbytes + - todoCommentWithoutDetail + - tooManyResultsChecker + - truncateCmp + - typeAssertChain + - typeDefFirst + - unlabelStmt + - weakCond + - whyNoLint + settings: + ruleguard: + failOn: all + rules: ${base-path}/scripts/rules.go + gosec: + excludes: + - G601 + govet: + disable: + - loopclosure + importas: + no-unaliased: true + misspell: + locale: US + ignore-rules: + - trialer + nestif: + min-complexity: 20 + revive: + severity: warning + rules: + - name: atomic + - name: bare-return + - name: blank-imports + - name: bool-literal-in-expr + - name: call-to-gc + - name: confusing-results + - name: constant-logical-expr + - name: context-as-argument + - name: context-keys-type + # - name: deep-exit + - name: defer + - name: dot-imports + - name: duplicated-imports + - name: early-return + - name: empty-block + - name: empty-lines + - name: error-naming + - name: error-return + - name: error-strings + - name: errorf + - name: exported + - name: flag-parameter + - name: get-return + - name: identical-branches + - name: if-return + - name: import-shadowing + - name: increment-decrement + - name: indent-error-flow + - name: modifies-value-receiver + - name: package-comments + - name: range + - name: receiver-naming + - name: redefines-builtin-id + - name: string-of-int + - name: struct-tag + - name: superfluous-else + - name: time-naming + - name: unconditional-recursion + - name: unexported-naming + - name: unexported-return + - name: unhandled-error + - name: unnecessary-stmt + - name: unreachable-code + - name: unused-parameter + - name: unused-receiver + - name: var-declaration + - name: var-naming + - name: waitgroup-by-value + staticcheck: + checks: + - all + - SA4006 # Detects redundant assignments + - SA4009 # Detects redundant variable declarations + - SA1019 + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - 
std-error-handling + rules: + - linters: + - errcheck + - exhaustruct + - forcetypeassert + path: _test\.go + - linters: + - exhaustruct + path: scripts/* + - linters: + - ALL + path: scripts/rules.go + paths: + - scripts/rules.go + - coderd/database/dbmem + - node_modules + - .git + - third_party$ + - builtin$ + - examples$ +issues: + max-issues-per-linter: 0 + max-same-issues: 0 + fix: true +formatters: + enable: + - goimports + - gofmt + exclusions: + generated: lax + paths: + - scripts/rules.go + - coderd/database/dbmem + - node_modules + - .git + - third_party$ + - builtin$ + - examples$ diff --git a/cmd/readmevalidation/coderresources.go b/cmd/readmevalidation/coderresources.go index 98a953c0b..01ad5fe03 100644 --- a/cmd/readmevalidation/coderresources.go +++ b/cmd/readmevalidation/coderresources.go @@ -3,7 +3,6 @@ package main import ( "bufio" "errors" - "fmt" "log" "net/url" "os" @@ -12,6 +11,7 @@ import ( "slices" "strings" + "golang.org/x/xerrors" "gopkg.in/yaml.v3" ) @@ -27,7 +27,7 @@ type coderResourceFrontmatter struct { // coderResourceReadme represents a README describing a Terraform resource used // to help create Coder workspaces. As of 2025-04-15, this encapsulates both -// Coder Modules and Coder Templates +// Coder Modules and Coder Templates. 
type coderResourceReadme struct { resourceType string filePath string @@ -37,14 +37,14 @@ type coderResourceReadme struct { func validateCoderResourceDisplayName(displayName *string) error { if displayName != nil && *displayName == "" { - return errors.New("if defined, display_name must not be empty string") + return xerrors.New("if defined, display_name must not be empty string") } return nil } func validateCoderResourceDescription(description string) error { if description == "" { - return errors.New("frontmatter description cannot be empty") + return xerrors.New("frontmatter description cannot be empty") } return nil } @@ -53,29 +53,29 @@ func validateCoderResourceIconURL(iconURL string) []error { problems := []error{} if iconURL == "" { - problems = append(problems, errors.New("icon URL cannot be empty")) + problems = append(problems, xerrors.New("icon URL cannot be empty")) return problems } isAbsoluteURL := !strings.HasPrefix(iconURL, ".") && !strings.HasPrefix(iconURL, "/") if isAbsoluteURL { if _, err := url.ParseRequestURI(iconURL); err != nil { - problems = append(problems, errors.New("absolute icon URL is not correctly formatted")) + problems = append(problems, xerrors.New("absolute icon URL is not correctly formatted")) } if strings.Contains(iconURL, "?") { - problems = append(problems, errors.New("icon URLs cannot contain query parameters")) + problems = append(problems, xerrors.New("icon URLs cannot contain query parameters")) } return problems } // Would normally be skittish about having relative paths like this, but it // should be safe because we have guarantees about the structure of the - // repo, and where this logic will run + // repo, and where this logic will run. 
isPermittedRelativeURL := strings.HasPrefix(iconURL, "./") || strings.HasPrefix(iconURL, "/") || strings.HasPrefix(iconURL, "../../../../.icons") if !isPermittedRelativeURL { - problems = append(problems, fmt.Errorf("relative icon URL %q must either be scoped to that module's directory, or the top-level /.icons directory (this can usually be done by starting the path with \"../../../.icons\")", iconURL)) + problems = append(problems, xerrors.Errorf("relative icon URL %q must either be scoped to that module's directory, or the top-level /.icons directory (this can usually be done by starting the path with \"../../../.icons\")", iconURL)) } return problems @@ -83,7 +83,7 @@ func validateCoderResourceIconURL(iconURL string) []error { func validateCoderResourceTags(tags []string) error { if tags == nil { - return errors.New("provided tags array is nil") + return xerrors.New("provided tags array is nil") } if len(tags) == 0 { return nil @@ -91,7 +91,7 @@ func validateCoderResourceTags(tags []string) error { // All of these tags are used for the module/template filter controls in the // Registry site. Need to make sure they can all be placed in the browser - // URL without issue + // URL without issue. invalidTags := []string{} for _, t := range tags { if t != url.QueryEscape(t) { @@ -100,7 +100,7 @@ func validateCoderResourceTags(tags []string) error { } if len(invalidTags) != 0 { - return fmt.Errorf("found invalid tags (tags that cannot be used for filter state in the Registry website): [%s]", strings.Join(invalidTags, ", ")) + return xerrors.Errorf("found invalid tags (tags that cannot be used for filter state in the Registry website): [%s]", strings.Join(invalidTags, ", ")) } return nil } @@ -110,7 +110,7 @@ func validateCoderResourceTags(tags []string) error { // parse any Terraform code snippets, and make some deeper guarantees about how // it's structured. Just validating whether it *can* be parsed as Terraform // would be a big improvement. 
-var terraformVersionRe = regexp.MustCompile("^\\s*\\bversion\\s+=") +var terraformVersionRe = regexp.MustCompile(`^\s*\bversion\s+=`) func validateCoderResourceReadmeBody(body string) []error { trimmed := strings.TrimSpace(body) @@ -132,7 +132,7 @@ func validateCoderResourceReadmeBody(body string) []error { // Code assumes that invalid headers would've already been handled by // the base validation function, so we don't need to check deeper if the - // first line isn't an h1 + // first line isn't an h1. if lineNum == 1 { if !strings.HasPrefix(nextLine, "# ") { break @@ -147,7 +147,7 @@ func validateCoderResourceReadmeBody(body string) []error { terraformCodeBlockCount++ } if strings.HasPrefix(nextLine, "```hcl") { - errs = append(errs, errors.New("all .hcl language references must be converted to .tf")) + errs = append(errs, xerrors.New("all .hcl language references must be converted to .tf")) } continue } @@ -160,34 +160,34 @@ func validateCoderResourceReadmeBody(body string) []error { } // Code assumes that we can treat this case as the end of the "h1 - // section" and don't need to process any further lines + // section" and don't need to process any further lines. if lineNum > 1 && strings.HasPrefix(nextLine, "#") { break } // Code assumes that if we've reached this point, the only other options // are: (1) empty spaces, (2) paragraphs, (3) HTML, and (4) asset - // references made via [] syntax + // references made via [] syntax. 
trimmedLine := strings.TrimSpace(nextLine) isParagraph := trimmedLine != "" && !strings.HasPrefix(trimmedLine, "![") && !strings.HasPrefix(trimmedLine, "<") foundParagraph = foundParagraph || isParagraph } if terraformCodeBlockCount == 0 { - errs = append(errs, errors.New("did not find Terraform code block within h1 section")) + errs = append(errs, xerrors.New("did not find Terraform code block within h1 section")) } else { if terraformCodeBlockCount > 1 { - errs = append(errs, errors.New("cannot have more than one Terraform code block in h1 section")) + errs = append(errs, xerrors.New("cannot have more than one Terraform code block in h1 section")) } if !foundTerraformVersionRef { - errs = append(errs, errors.New("did not find Terraform code block that specifies 'version' field")) + errs = append(errs, xerrors.New("did not find Terraform code block that specifies 'version' field")) } } if !foundParagraph { - errs = append(errs, errors.New("did not find paragraph within h1 section")) + errs = append(errs, xerrors.New("did not find paragraph within h1 section")) } if isInsideCodeBlock { - errs = append(errs, errors.New("code blocks inside h1 section do not all terminate before end of file")) + errs = append(errs, xerrors.New("code blocks inside h1 section do not all terminate before end of file")) } return errs @@ -220,12 +220,12 @@ func validateCoderResourceReadme(rm coderResourceReadme) []error { func parseCoderResourceReadme(resourceType string, rm readme) (coderResourceReadme, error) { fm, body, err := separateFrontmatter(rm.rawText) if err != nil { - return coderResourceReadme{}, fmt.Errorf("%q: failed to parse frontmatter: %v", rm.filePath, err) + return coderResourceReadme{}, xerrors.Errorf("%q: failed to parse frontmatter: %v", rm.filePath, err) } yml := coderResourceFrontmatter{} if err := yaml.Unmarshal([]byte(fm), &yml); err != nil { - return coderResourceReadme{}, fmt.Errorf("%q: failed to parse: %v", rm.filePath, err) + return coderResourceReadme{}, 
xerrors.Errorf("%q: failed to parse: %v", rm.filePath, err) } return coderResourceReadme{ @@ -257,9 +257,9 @@ func parseCoderResourceReadmeFiles(resourceType string, rms []readme) (map[strin yamlValidationErrors := []error{} for _, readme := range resources { - errors := validateCoderResourceReadme(readme) - if len(errors) > 0 { - yamlValidationErrors = append(yamlValidationErrors, errors...) + errs := validateCoderResourceReadme(readme) + if len(errs) > 0 { + yamlValidationErrors = append(yamlValidationErrors, errs...) } } if len(yamlValidationErrors) != 0 { @@ -273,8 +273,8 @@ func parseCoderResourceReadmeFiles(resourceType string, rms []readme) (map[strin } // Todo: Need to beef up this function by grabbing each image/video URL from -// the body's AST -func validateCoderResourceRelativeUrls(resources map[string]coderResourceReadme) error { +// the body's AST. +func validateCoderResourceRelativeUrls(_ map[string]coderResourceReadme) error { return nil } @@ -330,7 +330,7 @@ func aggregateCoderResourceReadmeFiles(resourceType string) ([]readme, error) { func validateAllCoderResourceFilesOfType(resourceType string) error { if !slices.Contains(supportedResourceTypes, resourceType) { - return fmt.Errorf("resource type %q is not part of supported list [%s]", resourceType, strings.Join(supportedResourceTypes, ", ")) + return xerrors.Errorf("resource type %q is not part of supported list [%s]", resourceType, strings.Join(supportedResourceTypes, ", ")) } allReadmeFiles, err := aggregateCoderResourceReadmeFiles(resourceType) diff --git a/cmd/readmevalidation/contributors.go b/cmd/readmevalidation/contributors.go index 53843dd37..c7fb8106f 100644 --- a/cmd/readmevalidation/contributors.go +++ b/cmd/readmevalidation/contributors.go @@ -1,8 +1,6 @@ package main import ( - "errors" - "fmt" "log" "net/url" "os" @@ -10,6 +8,7 @@ import ( "slices" "strings" + "golang.org/x/xerrors" "gopkg.in/yaml.v3" ) @@ -33,7 +32,7 @@ type contributorProfileReadme struct { func 
validateContributorDisplayName(displayName string) error { if displayName == "" { - return fmt.Errorf("missing display_name") + return xerrors.New("missing display_name") } return nil @@ -45,7 +44,7 @@ func validateContributorLinkedinURL(linkedinURL *string) error { } if _, err := url.ParseRequestURI(*linkedinURL); err != nil { - return fmt.Errorf("linkedIn URL %q is not valid: %v", *linkedinURL, err) + return xerrors.Errorf("linkedIn URL %q is not valid: %v", *linkedinURL, err) } return nil @@ -61,31 +60,31 @@ func validateContributorSupportEmail(email *string) []error { // Can't 100% validate that this is correct without actually sending // an email, and especially with some contributors being individual // developers, we don't want to do that on every single run of the CI - // pipeline. Best we can do is verify the general structure + // pipeline. Best we can do is verify the general structure. username, server, ok := strings.Cut(*email, "@") if !ok { - errs = append(errs, fmt.Errorf("email address %q is missing @ symbol", *email)) + errs = append(errs, xerrors.Errorf("email address %q is missing @ symbol", *email)) return errs } if username == "" { - errs = append(errs, fmt.Errorf("email address %q is missing username", *email)) + errs = append(errs, xerrors.Errorf("email address %q is missing username", *email)) } domain, tld, ok := strings.Cut(server, ".") if !ok { - errs = append(errs, fmt.Errorf("email address %q is missing period for server segment", *email)) + errs = append(errs, xerrors.Errorf("email address %q is missing period for server segment", *email)) return errs } if domain == "" { - errs = append(errs, fmt.Errorf("email address %q is missing domain", *email)) + errs = append(errs, xerrors.Errorf("email address %q is missing domain", *email)) } if tld == "" { - errs = append(errs, fmt.Errorf("email address %q is missing top-level domain", *email)) + errs = append(errs, xerrors.Errorf("email address %q is missing top-level domain", *email)) } if 
strings.Contains(*email, "?") { - errs = append(errs, errors.New("email is not allowed to contain query parameters")) + errs = append(errs, xerrors.New("email is not allowed to contain query parameters")) } return errs @@ -97,7 +96,7 @@ func validateContributorWebsite(websiteURL *string) error { } if _, err := url.ParseRequestURI(*websiteURL); err != nil { - return fmt.Errorf("linkedIn URL %q is not valid: %v", *websiteURL, err) + return xerrors.Errorf("linkedIn URL %q is not valid: %v", *websiteURL, err) } return nil @@ -105,14 +104,14 @@ func validateContributorWebsite(websiteURL *string) error { func validateContributorStatus(status string) error { if !slices.Contains(validContributorStatuses, status) { - return fmt.Errorf("contributor status %q is not valid", status) + return xerrors.Errorf("contributor status %q is not valid", status) } return nil } // Can't validate the image actually leads to a valid resource in a pure -// function, but can at least catch obvious problems +// function, but can at least catch obvious problems. func validateContributorAvatarURL(avatarURL *string) []error { if avatarURL == nil { return nil @@ -120,17 +119,17 @@ func validateContributorAvatarURL(avatarURL *string) []error { errs := []error{} if *avatarURL == "" { - errs = append(errs, errors.New("avatar URL must be omitted or non-empty string")) + errs = append(errs, xerrors.New("avatar URL must be omitted or non-empty string")) return errs } // Have to use .Parse instead of .ParseRequestURI because this is the - // one field that's allowed to be a relative URL + // one field that's allowed to be a relative URL. 
if _, err := url.Parse(*avatarURL); err != nil { - errs = append(errs, fmt.Errorf("URL %q is not a valid relative or absolute URL", *avatarURL)) + errs = append(errs, xerrors.Errorf("URL %q is not a valid relative or absolute URL", *avatarURL)) } if strings.Contains(*avatarURL, "?") { - errs = append(errs, errors.New("avatar URL is not allowed to contain search parameters")) + errs = append(errs, xerrors.New("avatar URL is not allowed to contain search parameters")) } matched := false @@ -143,7 +142,7 @@ func validateContributorAvatarURL(avatarURL *string) []error { if !matched { segments := strings.Split(*avatarURL, ".") fileExtension := segments[len(segments)-1] - errs = append(errs, fmt.Errorf("avatar URL '.%s' does not end in a supported file format: [%s]", fileExtension, strings.Join(supportedAvatarFileFormats, ", "))) + errs = append(errs, xerrors.Errorf("avatar URL '.%s' does not end in a supported file format: [%s]", fileExtension, strings.Join(supportedAvatarFileFormats, ", "))) } return errs @@ -178,12 +177,12 @@ func validateContributorReadme(rm contributorProfileReadme) []error { func parseContributorProfile(rm readme) (contributorProfileReadme, error) { fm, _, err := separateFrontmatter(rm.rawText) if err != nil { - return contributorProfileReadme{}, fmt.Errorf("%q: failed to parse frontmatter: %v", rm.filePath, err) + return contributorProfileReadme{}, xerrors.Errorf("%q: failed to parse frontmatter: %v", rm.filePath, err) } yml := contributorProfileFrontmatter{} if err := yaml.Unmarshal([]byte(fm), &yml); err != nil { - return contributorProfileReadme{}, fmt.Errorf("%q: failed to parse: %v", rm.filePath, err) + return contributorProfileReadme{}, xerrors.Errorf("%q: failed to parse: %v", rm.filePath, err) } return contributorProfileReadme{ @@ -204,7 +203,7 @@ func parseContributorFiles(readmeEntries []readme) (map[string]contributorProfil } if prev, alreadyExists := profilesByNamespace[p.namespace]; alreadyExists { - yamlParsingErrors = 
append(yamlParsingErrors, fmt.Errorf("%q: namespace %q conflicts with namespace from %q", p.filePath, p.namespace, prev.filePath)) + yamlParsingErrors = append(yamlParsingErrors, xerrors.Errorf("%q: namespace %q conflicts with namespace from %q", p.filePath, p.namespace, prev.filePath)) continue } profilesByNamespace[p.namespace] = p @@ -272,7 +271,7 @@ func aggregateContributorReadmeFiles() ([]readme, error) { func validateContributorRelativeUrls(contributors map[string]contributorProfileReadme) error { // This function only validates relative avatar URLs for now, but it can be - // beefed up to validate more in the future + // beefed up to validate more in the future. var errs []error for _, con := range contributors { if con.frontmatter.AvatarURL == nil { @@ -288,7 +287,7 @@ func validateContributorRelativeUrls(contributors map[string]contributorProfileR isAvatarInApprovedSpot := strings.HasPrefix(*con.frontmatter.AvatarURL, "./.images/") || strings.HasPrefix(*con.frontmatter.AvatarURL, ".images/") if !isAvatarInApprovedSpot { - errs = append(errs, fmt.Errorf("%q: relative avatar URLs cannot be placed outside a user's namespaced directory", con.filePath)) + errs = append(errs, xerrors.Errorf("%q: relative avatar URLs cannot be placed outside a user's namespaced directory", con.filePath)) continue } @@ -296,7 +295,7 @@ func validateContributorRelativeUrls(contributors map[string]contributorProfileR *con.frontmatter.AvatarURL _, err := os.Stat(absolutePath) if err != nil { - errs = append(errs, fmt.Errorf("%q: path %q does not point to image in file system", con.filePath, absolutePath)) + errs = append(errs, xerrors.Errorf("%q: relative avatar path %q does not point to image in file system", con.filePath, absolutePath)) } } diff --git a/cmd/readmevalidation/errors.go b/cmd/readmevalidation/errors.go index d9dbb1792..92264fd0c 100644 --- a/cmd/readmevalidation/errors.go +++ b/cmd/readmevalidation/errors.go @@ -1,6 +1,10 @@ package main -import "fmt" +import ( + 
"fmt" + + "golang.org/x/xerrors" +) // validationPhaseError represents an error that occurred during a specific // phase of README validation. It should be used to collect ALL validation @@ -24,5 +28,5 @@ func (vpe validationPhaseError) Error() string { } func addFilePathToError(filePath string, err error) error { - return fmt.Errorf("%q: %v", filePath, err) + return xerrors.Errorf("%q: %v", filePath, err) } diff --git a/cmd/readmevalidation/main.go b/cmd/readmevalidation/main.go index 6a8b73fce..a218c69de 100644 --- a/cmd/readmevalidation/main.go +++ b/cmd/readmevalidation/main.go @@ -17,11 +17,11 @@ import ( var logger = slog.Make(sloghuman.Sink(os.Stdout)) func main() { - logger.Info(context.Background(), "Starting README validation") + logger.Info(context.Background(), "starting README validation") // If there are fundamental problems with how the repo is structured, we // can't make any guarantees that any further validations will be relevant - // or accurate + // or accurate. err := validateRepoStructure() if err != nil { log.Println(err) @@ -39,7 +39,7 @@ func main() { } if len(errs) == 0 { - logger.Info(context.Background(), "Processed all READMEs in directory", "dir", rootRegistryPath) + logger.Info(context.Background(), "processed all READMEs in directory", "dir", rootRegistryPath) os.Exit(0) } for _, err := range errs { diff --git a/cmd/readmevalidation/readmefiles.go b/cmd/readmevalidation/readmefiles.go index 29676527a..2fd3b87a4 100644 --- a/cmd/readmevalidation/readmefiles.go +++ b/cmd/readmevalidation/readmefiles.go @@ -2,10 +2,10 @@ package main import ( "bufio" - "errors" - "fmt" "regexp" "strings" + + "golang.org/x/xerrors" ) const rootRegistryPath = "./registry" @@ -23,9 +23,9 @@ type readme struct { // from the main README body, returning both values in that order. It does not // validate whether the structure of the frontmatter is valid (i.e., that it's // structured as YAML). 
-func separateFrontmatter(readmeText string) (string, string, error) { +func separateFrontmatter(readmeText string) (readmeFrontmatter string, readmeBody string, err error) { if readmeText == "" { - return "", "", errors.New("README is empty") + return "", "", xerrors.New("README is empty") } const fence = "---" @@ -41,7 +41,7 @@ func separateFrontmatter(readmeText string) (string, string, error) { continue } // Break early if the very first line wasn't a fence, because then we - // know for certain that the README has problems + // know for certain that the README has problems. if fenceCount == 0 { break } @@ -49,7 +49,7 @@ func separateFrontmatter(readmeText string) (string, string, error) { // It should be safe to trim each line of the frontmatter on a per-line // basis, because there shouldn't be any extra meaning attached to the // indentation. The same does NOT apply to the README; best we can do is - // gather all the lines, and then trim around it + // gather all the lines, and then trim around it. if inReadmeBody := fenceCount >= 2; inReadmeBody { body += nextLine + "\n" } else { @@ -57,31 +57,31 @@ func separateFrontmatter(readmeText string) (string, string, error) { } } if fenceCount < 2 { - return "", "", errors.New("README does not have two sets of frontmatter fences") + return "", "", xerrors.New("README does not have two sets of frontmatter fences") } if fm == "" { - return "", "", errors.New("readme has frontmatter fences but no frontmatter content") + return "", "", xerrors.New("readme has frontmatter fences but no frontmatter content") } return fm, strings.TrimSpace(body), nil } -var readmeHeaderRe = regexp.MustCompile("^(#{1,})(\\s*)") +var readmeHeaderRe = regexp.MustCompile(`^(#+)(\s*)`) // Todo: This seems to work okay for now, but the really proper way of doing -// this is by parsing this as an AST, and then checking the resulting nodes +// this is by parsing this as an AST, and then checking the resulting nodes. 
func validateReadmeBody(body string) []error { trimmed := strings.TrimSpace(body) if trimmed == "" { - return []error{errors.New("README body is empty")} + return []error{xerrors.New("README body is empty")} } // If the very first line of the README, there's a risk that the rest of the // validation logic will break, since we don't have many guarantees about - // how the README is actually structured + // how the README is actually structured. if !strings.HasPrefix(trimmed, "# ") { - return []error{errors.New("README body must start with ATX-style h1 header (i.e., \"# \")")} + return []error{xerrors.New("README body must start with ATX-style h1 header (i.e., \"# \")")} } var errs []error @@ -95,7 +95,7 @@ func validateReadmeBody(body string) []error { // Have to check this because a lot of programming languages support # // comments (including Terraform), and without any context, there's no - // way to tell the difference between a markdown header and code comment + // way to tell the difference between a markdown header and code comment. if strings.HasPrefix(nextLine, "```") { isInCodeBlock = !isInCodeBlock continue @@ -111,7 +111,7 @@ func validateReadmeBody(body string) []error { spaceAfterHeader := headerGroups[2] if spaceAfterHeader == "" { - errs = append(errs, errors.New("header does not have space between header characters and main header text")) + errs = append(errs, xerrors.New("header does not have space between header characters and main header text")) } nextHeaderLevel := len(headerGroups[1]) @@ -122,26 +122,26 @@ func validateReadmeBody(body string) []error { } // If we have obviously invalid headers, it's not really safe to keep - // proceeding with the rest of the content + // proceeding with the rest of the content. 
if nextHeaderLevel == 1 { - errs = append(errs, errors.New("READMEs cannot contain more than h1 header")) + errs = append(errs, xerrors.New("READMEs cannot contain more than h1 header")) break } if nextHeaderLevel > 6 { - errs = append(errs, fmt.Errorf("README/HTML files cannot have headers exceed level 6 (found level %d)", nextHeaderLevel)) + errs = append(errs, xerrors.Errorf("README/HTML files cannot have headers exceed level 6 (found level %d)", nextHeaderLevel)) break } // This is something we need to enforce for accessibility, not just for // the Registry website, but also when users are viewing the README - // files in the GitHub web view + // files in the GitHub web view. if nextHeaderLevel > latestHeaderLevel && nextHeaderLevel != (latestHeaderLevel+1) { - errs = append(errs, fmt.Errorf("headers are not allowed to increase more than 1 level at a time")) + errs = append(errs, xerrors.New("headers are not allowed to increase more than 1 level at a time")) continue } // As long as the above condition passes, there's no problems with - // going up a header level or going down 1+ header levels + // going up a header level or going down 1+ header levels. latestHeaderLevel = nextHeaderLevel } @@ -154,24 +154,20 @@ func validateReadmeBody(body string) []error { type validationPhase string const ( - // validationPhaseFileStructureValidation indicates when the entire Registry + // ValidationPhaseFileStructureValidation indicates when the entire Registry // directory is being verified for having all files be placed in the file // system as expected. validationPhaseFileStructureValidation validationPhase = "File structure validation" - // validationPhaseFileLoad indicates when README files are being read from - // the file system + // ValidationPhaseFileLoad indicates when README files are being read from + // the file system. 
validationPhaseFileLoad = "Filesystem reading" - // validationPhaseReadmeParsing indicates when a README's frontmatter is + // ValidationPhaseReadmeParsing indicates when a README's frontmatter is // being parsed as YAML. This phase does not include YAML validation. validationPhaseReadmeParsing = "README parsing" - // validationPhaseReadmeValidation indicates when a README's frontmatter is - // being validated as proper YAML with expected keys. - validationPhaseReadmeValidation = "README validation" - - // validationPhaseAssetCrossReference indicates when a README's frontmatter + // ValidationPhaseAssetCrossReference indicates when a README's frontmatter // is having all its relative URLs be validated for whether they point to // valid resources. validationPhaseAssetCrossReference = "Cross-referencing relative asset URLs" diff --git a/cmd/readmevalidation/repostructure.go b/cmd/readmevalidation/repostructure.go index 11bd920d9..01b40a3d6 100644 --- a/cmd/readmevalidation/repostructure.go +++ b/cmd/readmevalidation/repostructure.go @@ -2,14 +2,15 @@ package main import ( "errors" - "fmt" "os" "path" "slices" "strings" + + "golang.org/x/xerrors" ) -var supportedUserNameSpaceDirectories = append(supportedResourceTypes[:], ".icons", ".images") +var supportedUserNameSpaceDirectories = append(supportedResourceTypes, ".icons", ".images") func validateCoderResourceSubdirectory(dirPath string) []error { errs := []error{} @@ -17,7 +18,7 @@ func validateCoderResourceSubdirectory(dirPath string) []error { subDir, err := os.Stat(dirPath) if err != nil { // It's valid for a specific resource directory not to exist. It's just - // that if it does exist, it must follow specific rules + // that if it does exist, it must follow specific rules. 
if !errors.Is(err, os.ErrNotExist) { errs = append(errs, addFilePathToError(dirPath, err)) } @@ -25,7 +26,7 @@ func validateCoderResourceSubdirectory(dirPath string) []error { } if !subDir.IsDir() { - errs = append(errs, fmt.Errorf("%q: path is not a directory", dirPath)) + errs = append(errs, xerrors.Errorf("%q: path is not a directory", dirPath)) return errs } @@ -38,7 +39,7 @@ func validateCoderResourceSubdirectory(dirPath string) []error { // The .coder subdirectories are sometimes generated as part of Bun // tests. These subdirectories will never be committed to the repo, but // in the off chance that they don't get cleaned up properly, we want to - // skip over them + // skip over them. if !f.IsDir() || f.Name() == ".coder" { continue } @@ -47,7 +48,7 @@ func validateCoderResourceSubdirectory(dirPath string) []error { _, err := os.Stat(resourceReadmePath) if err != nil { if errors.Is(err, os.ErrNotExist) { - errs = append(errs, fmt.Errorf("%q: 'README.md' does not exist", resourceReadmePath)) + errs = append(errs, xerrors.Errorf("%q: 'README.md' does not exist", resourceReadmePath)) } else { errs = append(errs, addFilePathToError(resourceReadmePath, err)) } @@ -57,12 +58,11 @@ func validateCoderResourceSubdirectory(dirPath string) []error { _, err = os.Stat(mainTerraformPath) if err != nil { if errors.Is(err, os.ErrNotExist) { - errs = append(errs, fmt.Errorf("%q: 'main.tf' file does not exist", mainTerraformPath)) + errs = append(errs, xerrors.Errorf("%q: 'main.tf' file does not exist", mainTerraformPath)) } else { errs = append(errs, addFilePathToError(mainTerraformPath, err)) } } - } return errs @@ -78,7 +78,7 @@ func validateRegistryDirectory() []error { for _, d := range userDirs { dirPath := path.Join(rootRegistryPath, d.Name()) if !d.IsDir() { - allErrs = append(allErrs, fmt.Errorf("detected non-directory file %q at base of main Registry directory", dirPath)) + allErrs = append(allErrs, xerrors.Errorf("detected non-directory file %q at base of main 
Registry directory", dirPath)) continue } @@ -96,7 +96,7 @@ func validateRegistryDirectory() []error { for _, f := range files { // Todo: Decide if there's anything more formal that we want to - // ensure about non-directories scoped to user namespaces + // ensure about non-directories scoped to user namespaces. if !f.IsDir() { continue } @@ -105,7 +105,7 @@ func validateRegistryDirectory() []error { filePath := path.Join(dirPath, segment) if !slices.Contains(supportedUserNameSpaceDirectories, segment) { - allErrs = append(allErrs, fmt.Errorf("%q: only these sub-directories are allowed at top of user namespace: [%s]", filePath, strings.Join(supportedUserNameSpaceDirectories, ", "))) + allErrs = append(allErrs, xerrors.Errorf("%q: only these sub-directories are allowed at top of user namespace: [%s]", filePath, strings.Join(supportedUserNameSpaceDirectories, ", "))) continue } @@ -129,7 +129,7 @@ func validateRepoStructure() error { _, err := os.Stat("./.icons") if err != nil { - problems = append(problems, errors.New("missing top-level .icons directory (used for storing reusable Coder resource icons)")) + problems = append(problems, xerrors.New("missing top-level .icons directory (used for storing reusable Coder resource icons)")) } if len(problems) != 0 { diff --git a/go.mod b/go.mod index b75504ecf..d3caf9128 100644 --- a/go.mod +++ b/go.mod @@ -4,6 +4,8 @@ go 1.23.2 require ( cdr.dev/slog v1.6.1 + github.com/quasilyte/go-ruleguard/dsl v0.3.22 + golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da gopkg.in/yaml.v3 v3.0.1 ) @@ -21,5 +23,4 @@ require ( golang.org/x/crypto v0.35.0 // indirect golang.org/x/sys v0.30.0 // indirect golang.org/x/term v0.29.0 // indirect - golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect ) diff --git a/go.sum b/go.sum index d65be08e5..382ca6d7d 100644 --- a/go.sum +++ b/go.sum @@ -35,6 +35,8 @@ github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo github.com/muesli/termenv v0.15.2/go.mod 
h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE= +github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= @@ -60,8 +62,8 @@ golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= +golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= google.golang.org/genproto v0.0.0-20230726155614-23370e0ffb3e h1:xIXmWJ303kJCuogpj0bHq+dcjcZHU+XFyc1I0Yl9cRg= google.golang.org/genproto v0.0.0-20230726155614-23370e0ffb3e/go.mod h1:0ggbjUrZYpy1q+ANUS30SEoGZ53cdfwtbuG7Ptgy108= google.golang.org/genproto/googleapis/api v0.0.0-20230706204954-ccb25ca9f130 h1:XVeBY8d/FaK4848myy41HBqnDwvxeV3zMZhwN1TvAMU= diff --git a/scripts/rules.go b/scripts/rules.go new file mode 100644 index 000000000..f6af25571 --- /dev/null +++ b/scripts/rules.go @@ -0,0 +1,255 @@ +// Package gorules defines custom lint rules for ruleguard. 
+// +// golangci-lint runs these rules via go-critic, which includes support +// for ruleguard. All Go files in this directory define lint rules +// in the Ruleguard DSL; see: +// +// - https://go-ruleguard.github.io/by-example/ +// - https://pkg.go.dev/github.com/quasilyte/go-ruleguard/dsl +// +// You run one of the following commands to execute your go rules only: +// +// golangci-lint run +// golangci-lint run --disable-all --enable=gocritic +// +// Note: don't forget to run `golangci-lint cache clean`! +package gorules + +import ( + "github.com/quasilyte/go-ruleguard/dsl" +) + +// Use xerrors everywhere! It provides additional stacktrace info! +// +//nolint:unused,deadcode,varnamelen +func xerrors(m dsl.Matcher) { + m.Import("errors") + m.Import("fmt") + m.Import("golang.org/x/xerrors") + + m.Match("fmt.Errorf($arg)"). + Suggest("xerrors.New($arg)"). + Report("Use xerrors to provide additional stacktrace information!") + + m.Match("fmt.Errorf($arg1, $*args)"). + Suggest("xerrors.Errorf($arg1, $args)"). + Report("Use xerrors to provide additional stacktrace information!") + + m.Match("errors.$_($msg)"). + Where(m["msg"].Type.Is("string")). + Suggest("xerrors.New($msg)"). + Report("Use xerrors to provide additional stacktrace information!") +} + +// databaseImport enforces not importing any database types into /codersdk. +// +//nolint:unused,deadcode,varnamelen +func databaseImport(m dsl.Matcher) { + m.Import("github.com/coder/coder/v2/coderd/database") + m.Match("database.$_"). + Report("Do not import any database types into codersdk"). + Where(m.File().PkgPath.Matches("github.com/coder/coder/v2/codersdk")) +} + +// doNotCallTFailNowInsideGoroutine enforces not calling t.FailNow or +// functions that may themselves call t.FailNow in goroutines outside +// the main test goroutine. See testing.go:834 for why. 
+// +//nolint:unused,deadcode,varnamelen +func doNotCallTFailNowInsideGoroutine(m dsl.Matcher) { + m.Import("testing") + m.Match(` + go func($*_){ + $*_ + $require.$_($*_) + $*_ + }($*_)`). + At(m["require"]). + Where(m["require"].Text == "require"). + Report("Do not call functions that may call t.FailNow in a goroutine, as this can cause data races (see testing.go:834)") + + // require.Eventually runs the function in a goroutine. + m.Match(` + require.Eventually(t, func() bool { + $*_ + $require.$_($*_) + $*_ + }, $*_)`). + At(m["require"]). + Where(m["require"].Text == "require"). + Report("Do not call functions that may call t.FailNow in a goroutine, as this can cause data races (see testing.go:834)") + + m.Match(` + go func($*_){ + $*_ + $t.$fail($*_) + $*_ + }($*_)`). + At(m["fail"]). + Where(m["t"].Type.Implements("testing.TB") && m["fail"].Text.Matches("^(FailNow|Fatal|Fatalf)$")). + Report("Do not call functions that may call t.FailNow in a goroutine, as this can cause data races (see testing.go:834)") +} + +// useStandardTimeoutsAndDelaysInTests ensures all tests use common +// constants for timeouts and delays in usual scenarios, this allows us +// to tweak them based on platform (important to avoid CI flakes). +// +//nolint:unused,deadcode,varnamelen +func useStandardTimeoutsAndDelaysInTests(m dsl.Matcher) { + m.Import("github.com/stretchr/testify/require") + m.Import("github.com/stretchr/testify/assert") + m.Import("github.com/coder/coder/v2/testutil") + + m.Match(`context.WithTimeout($ctx, $duration)`). + Where(m.File().Imports("testing") && !m.File().PkgPath.Matches("testutil$") && !m["duration"].Text.Matches("^testutil\\.")). + At(m["duration"]). + Report("Do not use magic numbers in test timeouts and delays. Use the standard testutil.Wait* or testutil.Interval* constants instead.") + + m.Match(` + $testify.$Eventually($t, func() bool { + $*_ + }, $timeout, $interval, $*_) + `). 
+ Where((m["testify"].Text == "require" || m["testify"].Text == "assert") && + (m["Eventually"].Text == "Eventually" || m["Eventually"].Text == "Eventuallyf") && + !m["timeout"].Text.Matches("^testutil\\.")). + At(m["timeout"]). + Report("Do not use magic numbers in test timeouts and delays. Use the standard testutil.Wait* or testutil.Interval* constants instead.") + + m.Match(` + $testify.$Eventually($t, func() bool { + $*_ + }, $timeout, $interval, $*_) + `). + Where((m["testify"].Text == "require" || m["testify"].Text == "assert") && + (m["Eventually"].Text == "Eventually" || m["Eventually"].Text == "Eventuallyf") && + !m["interval"].Text.Matches("^testutil\\.")). + At(m["interval"]). + Report("Do not use magic numbers in test timeouts and delays. Use the standard testutil.Wait* or testutil.Interval* constants instead.") +} + +// ProperRBACReturn ensures we always write to the response writer after a +// call to Authorize. If we just do a return, the client will get a status code +// 200, which is incorrect. +func ProperRBACReturn(m dsl.Matcher) { + m.Match(` + if !$_.Authorize($*_) { + return + } + `).Report("Must write to 'ResponseWriter' before returning'") +} + +// slogFieldNameSnakeCase is a lint rule that ensures naming consistency +// of logged field names. +func slogFieldNameSnakeCase(m dsl.Matcher) { + m.Import("cdr.dev/slog") + m.Match( + `slog.F($name, $value)`, + ). + Where(m["name"].Const && !m["name"].Text.Matches(`^"[a-z]+(_[a-z]+)*"$`)). + Report("Field name $name must be snake_case.") +} + +// slogUUIDFieldNameHasIDSuffix ensures that "uuid.UUID" field has ID suffix +// in the field name. +func slogUUIDFieldNameHasIDSuffix(m dsl.Matcher) { + m.Import("cdr.dev/slog") + m.Import("github.com/google/uuid") + m.Match( + `slog.F($name, $value)`, + ). + Where(m["value"].Type.Is("uuid.UUID") && !m["name"].Text.Matches(`_id"$`)). 
+ Report(`uuid.UUID field $name must have "_id" suffix.`) +} + +// slogMessageFormat ensures that the log message starts with lowercase, and does not +// end with special character. +func slogMessageFormat(m dsl.Matcher) { + m.Import("cdr.dev/slog") + m.Match( + `logger.Error($ctx, $message, $*args)`, + `logger.Warn($ctx, $message, $*args)`, + `logger.Info($ctx, $message, $*args)`, + `logger.Debug($ctx, $message, $*args)`, + + `$foo.logger.Error($ctx, $message, $*args)`, + `$foo.logger.Warn($ctx, $message, $*args)`, + `$foo.logger.Info($ctx, $message, $*args)`, + `$foo.logger.Debug($ctx, $message, $*args)`, + + `Logger.Error($ctx, $message, $*args)`, + `Logger.Warn($ctx, $message, $*args)`, + `Logger.Info($ctx, $message, $*args)`, + `Logger.Debug($ctx, $message, $*args)`, + + `$foo.Logger.Error($ctx, $message, $*args)`, + `$foo.Logger.Warn($ctx, $message, $*args)`, + `$foo.Logger.Info($ctx, $message, $*args)`, + `$foo.Logger.Debug($ctx, $message, $*args)`, + ). + Where( + ( + // It doesn't end with a special character: + m["message"].Text.Matches(`[.!?]"$`) || + // it starts with lowercase: + m["message"].Text.Matches(`^"[A-Z]{1}`) && + // but there are exceptions: + !m["message"].Text.Matches(`^"Prometheus`) && + !m["message"].Text.Matches(`^"X11`) && + !m["message"].Text.Matches(`^"CSP`) && + !m["message"].Text.Matches(`^"OIDC`))). + Report(`Message $message must start with lowercase, and does not end with a special characters.`) +} + +// slogMessageLength ensures that important log messages are meaningful, and must be at least 16 characters long. 
+func slogMessageLength(m dsl.Matcher) { + m.Import("cdr.dev/slog") + m.Match( + `logger.Error($ctx, $message, $*args)`, + `logger.Warn($ctx, $message, $*args)`, + `logger.Info($ctx, $message, $*args)`, + + `$foo.logger.Error($ctx, $message, $*args)`, + `$foo.logger.Warn($ctx, $message, $*args)`, + `$foo.logger.Info($ctx, $message, $*args)`, + + `Logger.Error($ctx, $message, $*args)`, + `Logger.Warn($ctx, $message, $*args)`, + `Logger.Info($ctx, $message, $*args)`, + + `$foo.Logger.Error($ctx, $message, $*args)`, + `$foo.Logger.Warn($ctx, $message, $*args)`, + `$foo.Logger.Info($ctx, $message, $*args)`, + + // no debug + ). + Where( + // It has at least 16 characters (+ ""): + m["message"].Text.Matches(`^".{0,15}"$`) && + // but there are exceptions: + !m["message"].Text.Matches(`^"command exit"$`)). + Report(`Message $message is too short, it must be at least 16 characters long.`) +} + +// slogError ensures that errors are logged with "slog.Error" instead of "slog.F". +func slogError(m dsl.Matcher) { + m.Import("cdr.dev/slog") + m.Match( + `slog.F($name, $value)`, + ). + Where(m["name"].Const && m["value"].Type.Is("error") && !m["name"].Text.Matches(`^"internal_error"$`)). + Report(`Error should be logged using "slog.Error" instead.`) +} + +// withTimezoneUTC ensures that we don't just sprinkle dbtestutil.WithTimezone("UTC") about +// to work around real timezone bugs in our code. +// +//nolint:unused,deadcode,varnamelen +func withTimezoneUTC(m dsl.Matcher) { + m.Match( + `dbtestutil.WithTimezone($tz)`, + ).Where( + m["tz"].Text.Matches(`[uU][tT][cC]"$`), + ).Report(`Setting database timezone to UTC may mask timezone-related bugs.`). + At(m["tz"]) +}