diff --git a/.ci/check_new_rules.go b/.ci/check_new_rules.go
index 5abae2f3..2ec00e8d 100644
--- a/.ci/check_new_rules.go
+++ b/.ci/check_new_rules.go
@@ -1,127 +1,127 @@
-// Scripts to check if all the rules that exist in the latest version of "gitleaks" are included in our list of rules (in secret.go file)
-package main
-
-import (
- "encoding/json"
- "fmt"
- "io"
- "net/http"
- "os"
- "regexp"
-)
-
-var (
- regexGitleaksRules = regexp.MustCompile(`(?m)^[^/\n\r]\s*rules\.([a-zA-Z0-9_]+)\(`)
- regex2msRules = regexp.MustCompile(`(?m)^[^/\n\r]\s*(?:// )?{Rule:\s*\*(?:rules\.)?([a-zA-Z0-9_]+)\(\),`)
-)
-
-func main() {
-
- latestGitleaksRelease, err := fetchGitleaksLatestRelease()
- if err != nil {
- fmt.Printf("%s\n", err)
- os.Exit(1)
- }
- fmt.Printf("Latest Gitleaks release: %s\n", latestGitleaksRelease)
-
- gitleaksRules, err := fetchGitleaksRules(latestGitleaksRelease)
- if err != nil {
- fmt.Printf("%s\n", err)
- os.Exit(1)
- }
-
- matchesGitleaksRules := regexGitleaksRules.FindAllStringSubmatch(string(gitleaksRules), -1)
- if len(matchesGitleaksRules) == 0 {
- fmt.Println("No rules found in the latest version of Gitleaks.")
- os.Exit(1)
- }
- fmt.Printf("Total rules in the latest version of Gitleaks: %d\n", len(matchesGitleaksRules))
-
- ourRules, err := fetchOurRules()
- if err != nil {
- fmt.Printf("%s\n", err)
- os.Exit(1)
- }
- match2msRules := regex2msRules.FindAllStringSubmatch(string(ourRules), -1)
- if len(match2msRules) == 0 {
- fmt.Println("No rules found in 2ms.")
- os.Exit(1)
- }
- fmt.Printf("Total rules in 2ms: %d\n", len(match2msRules))
-
- map2msRules := make(map[string]bool)
- for _, match := range match2msRules {
- map2msRules[match[1]] = true
- }
-
- missingRulesIn2ms := []string{}
- for _, rule := range matchesGitleaksRules {
- if _, found := map2msRules[rule[1]]; !found {
- missingRulesIn2ms = append(missingRulesIn2ms, rule[1])
- }
- }
-
- if len(missingRulesIn2ms) > 0 {
- fmt.Printf("%d rules exist in the latest version of Gitleaks but missing on 2ms: \n\n", len(missingRulesIn2ms))
- for _, rule := range missingRulesIn2ms {
- fmt.Printf("%s \n", rule)
- }
-
- fmt.Printf("\nLink to Gitleaks main.go file of version: %s:\n", latestGitleaksRelease)
- fmt.Println(getGitleaksRulesRawURL(latestGitleaksRelease))
-
- os.Exit(1)
- } else {
- fmt.Println("No differences found.")
- os.Exit(0)
- }
-}
-
-type Release struct {
- TagName string `json:"tag_name"`
-}
-
-func fetchGitleaksLatestRelease() (string, error) {
- var release Release
-
- response, err := http.Get("https://api.github.com/repos/zricethezav/gitleaks/releases/latest")
- if err != nil {
- return "", fmt.Errorf("failed to get latest release: %w", err)
- }
- defer response.Body.Close()
-
- decoder := json.NewDecoder(response.Body)
- if err := decoder.Decode(&release); err != nil {
- return "", fmt.Errorf("failed to decode latest release JSON: %w", err)
- }
-
- return release.TagName, nil
-}
-
-func fetchGitleaksRules(version string) ([]byte, error) {
- rawURLGitleaksRules := getGitleaksRulesRawURL(version)
- response, err := http.Get(rawURLGitleaksRules)
- if err != nil {
- return nil, fmt.Errorf("failed to fetch remote file: %w", err)
- }
- defer response.Body.Close()
-
- content, err := io.ReadAll(response.Body)
- if err != nil {
- return nil, fmt.Errorf("failed to read remote file content: %w", err)
- }
-
- return content, nil
-}
-
-func getGitleaksRulesRawURL(version string) string {
- return fmt.Sprintf("https://raw.githubusercontent.com/zricethezav/gitleaks/%s/cmd/generate/config/main.go", version)
-}
-
-func fetchOurRules() ([]byte, error) {
- content, err := os.ReadFile("engine/rules/rules.go")
- if err != nil {
- return nil, fmt.Errorf("failed to read our file content: %w", err)
- }
- return content, nil
-}
+// Scripts to check if all the rules that exist in the latest version of "gitleaks" are included in our list of rules (in secret.go file)
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "regexp"
+)
+
+var (
+ regexGitleaksRules = regexp.MustCompile(`(?m)^[^/\n\r]\s*rules\.([a-zA-Z0-9_]+)\(`)
+ regex2msRules = regexp.MustCompile(`(?m)^[^/\n\r]\s*(?:// )?{Rule:\s*\*(?:rules\.)?([a-zA-Z0-9_]+)\(\),`)
+)
+
+func main() {
+
+ latestGitleaksRelease, err := fetchGitleaksLatestRelease()
+ if err != nil {
+ fmt.Printf("%s\n", err)
+ os.Exit(1)
+ }
+ fmt.Printf("Latest Gitleaks release: %s\n", latestGitleaksRelease)
+
+ gitleaksRules, err := fetchGitleaksRules(latestGitleaksRelease)
+ if err != nil {
+ fmt.Printf("%s\n", err)
+ os.Exit(1)
+ }
+
+ matchesGitleaksRules := regexGitleaksRules.FindAllStringSubmatch(string(gitleaksRules), -1)
+ if len(matchesGitleaksRules) == 0 {
+ fmt.Println("No rules found in the latest version of Gitleaks.")
+ os.Exit(1)
+ }
+ fmt.Printf("Total rules in the latest version of Gitleaks: %d\n", len(matchesGitleaksRules))
+
+ ourRules, err := fetchOurRules()
+ if err != nil {
+ fmt.Printf("%s\n", err)
+ os.Exit(1)
+ }
+ match2msRules := regex2msRules.FindAllStringSubmatch(string(ourRules), -1)
+ if len(match2msRules) == 0 {
+ fmt.Println("No rules found in 2ms.")
+ os.Exit(1)
+ }
+ fmt.Printf("Total rules in 2ms: %d\n", len(match2msRules))
+
+ map2msRules := make(map[string]bool)
+ for _, match := range match2msRules {
+ map2msRules[match[1]] = true
+ }
+
+ missingRulesIn2ms := []string{}
+ for _, rule := range matchesGitleaksRules {
+ if _, found := map2msRules[rule[1]]; !found {
+ missingRulesIn2ms = append(missingRulesIn2ms, rule[1])
+ }
+ }
+
+ if len(missingRulesIn2ms) > 0 {
+ fmt.Printf("%d rules exist in the latest version of Gitleaks but missing on 2ms: \n\n", len(missingRulesIn2ms))
+ for _, rule := range missingRulesIn2ms {
+ fmt.Printf("%s \n", rule)
+ }
+
+ fmt.Printf("\nLink to Gitleaks main.go file of version: %s:\n", latestGitleaksRelease)
+ fmt.Println(getGitleaksRulesRawURL(latestGitleaksRelease))
+
+ os.Exit(1)
+ } else {
+ fmt.Println("No differences found.")
+ os.Exit(0)
+ }
+}
+
+type Release struct {
+ TagName string `json:"tag_name"`
+}
+
+func fetchGitleaksLatestRelease() (string, error) {
+ var release Release
+
+ response, err := http.Get("https://api.github.com/repos/zricethezav/gitleaks/releases/latest")
+ if err != nil {
+ return "", fmt.Errorf("failed to get latest release: %w", err)
+ }
+ defer response.Body.Close()
+
+ decoder := json.NewDecoder(response.Body)
+ if err := decoder.Decode(&release); err != nil {
+ return "", fmt.Errorf("failed to decode latest release JSON: %w", err)
+ }
+
+ return release.TagName, nil
+}
+
+func fetchGitleaksRules(version string) ([]byte, error) {
+ rawURLGitleaksRules := getGitleaksRulesRawURL(version)
+ response, err := http.Get(rawURLGitleaksRules)
+ if err != nil {
+ return nil, fmt.Errorf("failed to fetch remote file: %w", err)
+ }
+ defer response.Body.Close()
+
+ content, err := io.ReadAll(response.Body)
+ if err != nil {
+ return nil, fmt.Errorf("failed to read remote file content: %w", err)
+ }
+
+ return content, nil
+}
+
+func getGitleaksRulesRawURL(version string) string {
+ return fmt.Sprintf("https://raw.githubusercontent.com/zricethezav/gitleaks/%s/cmd/generate/config/main.go", version)
+}
+
+func fetchOurRules() ([]byte, error) {
+ content, err := os.ReadFile("engine/rules/rules.go")
+ if err != nil {
+ return nil, fmt.Errorf("failed to read our file content: %w", err)
+ }
+ return content, nil
+}
diff --git a/.ci/update-readme.sh b/.ci/update-readme.sh
index 7e98d4d1..0ee97947 100755
--- a/.ci/update-readme.sh
+++ b/.ci/update-readme.sh
@@ -1,30 +1,30 @@
-update_readme() {
- output_file=$1
- placeholder_name=$2
- target_file=$3
-
-    sed -i "/<!-- $placeholder_name:start -->/,/<!-- $placeholder_name:end -->/{
-        /<!-- $placeholder_name:start -->/{
-            p
-            r $output_file
-        }
-        /<!-- $placeholder_name:end -->/!d
-    }" $target_file
-}
-
-# Update the README with the help message
-help_message=$(go run .)
-
-echo "" >output.txt
-echo '```text' >>output.txt
-echo "$help_message" >>output.txt
-echo '```' >>output.txt
-echo "" >>output.txt
-update_readme "output.txt" "command-line" "README.md"
-rm output.txt
-
-go run . rules | awk 'BEGIN{FS = " *"}{print "| " $1 " | " $2 " | " $3 " | " $4 " |";}' >output.txt
-update_readme "output.txt" "table" "./docs/list-of-rules.md"
-rm output.txt
-
-git --no-pager diff README.md ./docs/list-of-rules.md
+update_readme() {
+ output_file=$1
+ placeholder_name=$2
+ target_file=$3
+
+    sed -i "/<!-- $placeholder_name:start -->/,/<!-- $placeholder_name:end -->/{
+        /<!-- $placeholder_name:start -->/{
+            p
+            r $output_file
+        }
+        /<!-- $placeholder_name:end -->/!d
+    }" $target_file
+}
+
+# Update the README with the help message
+help_message=$(go run .)
+
+echo "" >output.txt
+echo '```text' >>output.txt
+echo "$help_message" >>output.txt
+echo '```' >>output.txt
+echo "" >>output.txt
+update_readme "output.txt" "command-line" "README.md"
+rm output.txt
+
+go run . rules | awk 'BEGIN{FS = " *"}{print "| " $1 " | " $2 " | " $3 " | " $4 " |";}' >output.txt
+update_readme "output.txt" "table" "./docs/list-of-rules.md"
+rm output.txt
+
+git --no-pager diff README.md ./docs/list-of-rules.md
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 10ea74a1..bc3238e4 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1 +1 @@
-* @Checkmarx/2ms-dev
+* @Checkmarx/2ms-dev
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 64649c49..274bf08a 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -1,20 +1,20 @@
-
-
-Closes #
-
-**Proposed Changes**
-
-
-
-**Checklist**
-
-- [ ] I covered my changes with tests.
-- [ ] I Updated the documentation that is affected by my changes:
- - [ ] Change in the CLI arguments
- - [ ] Change in the configuration file
-
-I submit this contribution under the Apache-2.0 license.
+
+
+Closes #
+
+**Proposed Changes**
+
+
+
+**Checklist**
+
+- [ ] I covered my changes with tests.
+- [ ] I Updated the documentation that is affected by my changes:
+ - [ ] Change in the CLI arguments
+ - [ ] Change in the configuration file
+
+I submit this contribution under the Apache-2.0 license.
diff --git a/.github/workflows/codecov.yaml b/.github/workflows/codecov.yaml
index c833fad4..705bd825 100644
--- a/.github/workflows/codecov.yaml
+++ b/.github/workflows/codecov.yaml
@@ -1,45 +1,45 @@
-
-name: Codecov Scan
-
-on:
- push:
- branches:
- - main
- pull_request:
- workflow_dispatch:
-
-jobs:
- run:
- runs-on: ubuntu-latest
- env:
- go-version: 'stable'
-
- steps:
- - name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-
- - name: Set up Go
- uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
- with:
- go-version: ${{ env.go-version }}
- env:
- GOPROXY: direct
- GONOSUMDB: "*"
- GOPRIVATE: https://github.com/CheckmarxDev/ # Add your private organization url here
-
- - name: Install dependencies
- run: go install golang.org/x/tools/cmd/cover@latest
-
- - name: Run tests and generate coverage
- run: |
- go test ./... -coverpkg=./... -v -coverprofile cover.out
-
-
- - name: Upload coverage to Codecov
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed # v4.3.0
- with:
- token: ${{ secrets.CODECOV_TOKEN }}
- files: ./cover.out
- flags: target=auto
- fail_ci_if_error: true
- verbose: false
+
+name: Codecov Scan
+
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+ workflow_dispatch:
+
+jobs:
+ run:
+ runs-on: ubuntu-latest
+ env:
+ go-version: 'stable'
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+
+ - name: Set up Go
+ uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
+ with:
+ go-version: ${{ env.go-version }}
+ env:
+ GOPROXY: direct
+ GONOSUMDB: "*"
+ GOPRIVATE: https://github.com/CheckmarxDev/ # Add your private organization url here
+
+ - name: Install dependencies
+ run: go install golang.org/x/tools/cmd/cover@latest
+
+ - name: Run tests and generate coverage
+ run: |
+ go test ./... -coverpkg=./... -v -coverprofile cover.out
+
+
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed # v4.3.0
+ with:
+ token: ${{ secrets.CODECOV_TOKEN }}
+ files: ./cover.out
+ flags: target=auto
+ fail_ci_if_error: true
+ verbose: false
diff --git a/.github/workflows/new-rules.yml b/.github/workflows/new-rules.yml
index 61b5043a..6e434320 100644
--- a/.github/workflows/new-rules.yml
+++ b/.github/workflows/new-rules.yml
@@ -1,17 +1,17 @@
-name: New Rules from Gitleaks
-
-on:
- workflow_dispatch:
- schedule:
- - cron: "0 2 * * 6" # At 02:00 on Saturday
-
-jobs:
- update_secrets:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
- with:
- go-version: "^1.22"
- - name: Check Gitleaks new rules
- run: go run .ci/check_new_rules.go
+name: New Rules from Gitleaks
+
+on:
+ workflow_dispatch:
+ schedule:
+ - cron: "0 2 * * 6" # At 02:00 on Saturday
+
+jobs:
+ update_secrets:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
+ with:
+ go-version: "^1.22"
+ - name: Check Gitleaks new rules
+ run: go run .ci/check_new_rules.go
diff --git a/.github/workflows/pr-labels.yml b/.github/workflows/pr-labels.yml
index be06a2b5..d2f0e111 100644
--- a/.github/workflows/pr-labels.yml
+++ b/.github/workflows/pr-labels.yml
@@ -1,23 +1,23 @@
-name: PR Labels
-
-on:
- pull_request_target:
- types: [opened]
-
-jobs:
- mark_as_community:
- runs-on: ubuntu-latest
- permissions:
- pull-requests: write
- steps:
- - name: Mark as Community if PR is from a fork
- if: github.event.pull_request.head.repo.full_name != github.repository
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- with:
- script: |
- github.rest.issues.addLabels({
- issue_number: context.issue.number,
- owner: context.repo.owner,
- repo: context.repo.repo,
- labels: ['Community']
- })
+name: PR Labels
+
+on:
+ pull_request_target:
+ types: [opened]
+
+jobs:
+ mark_as_community:
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ steps:
+ - name: Mark as Community if PR is from a fork
+ if: github.event.pull_request.head.repo.full_name != github.repository
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ with:
+ script: |
+ github.rest.issues.addLabels({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ labels: ['Community']
+ })
diff --git a/.github/workflows/pr-title.yml b/.github/workflows/pr-title.yml
index c3143efb..5f3431c8 100644
--- a/.github/workflows/pr-title.yml
+++ b/.github/workflows/pr-title.yml
@@ -1,18 +1,18 @@
-name: Validate Conventional Commit title
-
-on:
- pull_request:
- types: [opened, edited, synchronize, reopened]
-
-jobs:
- validate:
- runs-on: ubuntu-latest
- steps:
- - name: install commitlint
- run: npm install -g @commitlint/cli @commitlint/config-conventional
- - name: config commitlint
- run: |
- echo "module.exports = {extends: ['@commitlint/config-conventional']}" > commitlint.config.js
- - name: validate PR title
- run: |
- echo ${{ github.event.pull_request.title }} | commitlint
+name: Validate Conventional Commit title
+
+on:
+ pull_request:
+ types: [opened, edited, synchronize, reopened]
+
+jobs:
+ validate:
+ runs-on: ubuntu-latest
+ steps:
+ - name: install commitlint
+ run: npm install -g @commitlint/cli @commitlint/config-conventional
+ - name: config commitlint
+ run: |
+ echo "module.exports = {extends: ['@commitlint/config-conventional']}" > commitlint.config.js
+ - name: validate PR title
+ run: |
+ echo ${{ github.event.pull_request.title }} | commitlint
diff --git a/.github/workflows/pr-validation.yml b/.github/workflows/pr-validation.yml
index 0b349558..23e91e78 100644
--- a/.github/workflows/pr-validation.yml
+++ b/.github/workflows/pr-validation.yml
@@ -1,76 +1,76 @@
-name: PR Validation
-
-on:
- pull_request:
- branches:
- - master
- merge_group:
-
-jobs:
- test:
- strategy:
- matrix:
- os: [ubuntu-latest]
-
- runs-on: ${{ matrix.os }}
-
- steps:
- - name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- with:
- fetch-depth: 0 # Required for 2ms to have visibility to all commit history
-
- - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
- with:
- go-version: "^1.22"
-
- - name: go mod tidy
- run: |
- go mod tidy
- git diff --exit-code
-
- - name: Go Linter
- run: docker run --rm -v $(pwd):/app -w /app golangci/golangci-lint:v1.61.0 golangci-lint run -v -E gofmt --timeout=5m --out-format github-actions
-
- - name: Go Test
- run: go test -v ./...
-
- - name: Run 2ms Scan
- run: go run . git . --config .2ms.yml
-
- build:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb #v3.3.0
-
- - run: make build
- - name: docker run
- run: |
- docker run -v "$(pwd)":/repo -t checkmarx/2ms:latest git /repo --report-path output/results.json --ignore-on-exit results
-
- kics:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- - run: mkdir -p kics-results
-
- - name: Run KICS scan
- uses: checkmarx/kics-github-action@03c9abe351b01c3e4dbe60fa00ff79ee07d73f44 # master
- with:
- path: .
- output_path: kics-results
- output_formats: json,sarif
- enable_comments: ${{ github.event_name == 'pull_request'}}
- fail_on: high,medium
- enable_jobs_summary: true
- - name: Show KICS results
- if: failure()
- run: cat kics-results/results.json
- # - name: Upload SARIF file
- # uses: github/codeql-action/upload-sarif@4355270be187e1b672a7a1c7c7bae5afdc1ab94a #v3.24.10
- # with:
- # sarif_file: kics-results/results.sarif
+name: PR Validation
+
+on:
+ pull_request:
+ branches:
+ - master
+ merge_group:
+
+jobs:
+ test:
+ strategy:
+ matrix:
+ os: [ubuntu-latest]
+
+ runs-on: ${{ matrix.os }}
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ fetch-depth: 0 # Required for 2ms to have visibility to all commit history
+
+ - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
+ with:
+ go-version: "^1.22"
+
+ - name: go mod tidy
+ run: |
+ go mod tidy
+ git diff --exit-code
+
+ - name: Go Linter
+ run: docker run --rm -v $(pwd):/app -w /app golangci/golangci-lint:v1.61.0 golangci-lint run -v -E gofmt --timeout=5m --out-format github-actions
+
+ - name: Go Test
+ run: go test -v ./...
+
+ - name: Run 2ms Scan
+ run: go run . git . --config .2ms.yml
+
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb #v3.3.0
+
+ - run: make build
+ - name: docker run
+ run: |
+ docker run -v "$(pwd)":/repo -t checkmarx/2ms:latest git /repo --report-path output/results.json --ignore-on-exit results
+
+ kics:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - run: mkdir -p kics-results
+
+ - name: Run KICS scan
+ uses: checkmarx/kics-github-action@03c9abe351b01c3e4dbe60fa00ff79ee07d73f44 # master
+ with:
+ path: .
+ output_path: kics-results
+ output_formats: json,sarif
+ enable_comments: ${{ github.event_name == 'pull_request'}}
+ fail_on: high,medium
+ enable_jobs_summary: true
+ - name: Show KICS results
+ if: failure()
+ run: cat kics-results/results.json
+ # - name: Upload SARIF file
+ # uses: github/codeql-action/upload-sarif@4355270be187e1b672a7a1c7c7bae5afdc1ab94a #v3.24.10
+ # with:
+ # sarif_file: kics-results/results.sarif
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index daeea115..86051d7d 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,139 +1,139 @@
-name: Release
-
-on:
- workflow_dispatch:
- push:
- branches: [master]
-
-jobs:
- test:
- name: Test
- runs-on: ubuntu-latest
-
- outputs:
- git_tag: ${{ steps.semantic_release_info.outputs.git_tag }}
- version: ${{ steps.semantic_release_info.outputs.version }}
- notes: ${{ steps.semantic_release_info.outputs.notes }}
- steps:
- - name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- with:
- fetch-depth: 0 # Required for 2ms to have visibility to all commit history
-
- - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
- with:
- go-version: "^1.22"
- - name: Go Linter
- run: docker run --rm -v $(pwd):/app -w /app golangci/golangci-lint:v1.61.0 golangci-lint run -v -E gofmt --timeout=5m
-
- - name: Unit Tests
- run: go test ./...
-
- - name: Gets release info
- id: semantic_release_info
- if: github.event_name == 'workflow_dispatch'
- uses: jossef/action-semantic-release-info@277fc891fc5ac40ed0e8d6bf59a0e24a25dfdeac #v3.0.0
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
- build:
- name: Build and Release
- runs-on: ubuntu-latest
- needs: test
- if: ${{ needs.test.outputs.git_tag }}
- steps:
- - name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
-
- - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
- with:
- go-version: "^1.22"
-
- - name: Go Mod Tidy
- run: go mod tidy
-
- - name: Compile for Linux amd64
- env:
- VERSION: ${{ needs.test.outputs.version }}
- CGO_ENABLED: 0
- GOOS: linux
- GOARCH: amd64
- run: |
- go build -ldflags "-s -w -X github.com/checkmarx/2ms/cmd.Version=$VERSION" -a -installsuffix cgo -o dist/2ms main.go
- zip -j dist/linux-amd64.zip dist/2ms
- rm dist/2ms
-
- - name: Compile for MacOS amd64
- env:
- VERSION: ${{ needs.test.outputs.version }}
- CGO_ENABLED: 0
- GOOS: darwin
- GOARCH: amd64
- run: |
- go build -ldflags "-s -w -X github.com/checkmarx/2ms/cmd.Version=$VERSION" -a -installsuffix cgo -o dist/2ms main.go
- zip -j dist/macos-amd64.zip dist/2ms
- rm dist/2ms
-
- - name: Compile for MacOS arm64
- env:
- VERSION: ${{ needs.test.outputs.version }}
- CGO_ENABLED: 0
- GOOS: darwin
- GOARCH: arm64
- run: |
- go build -ldflags "-s -w -X github.com/checkmarx/2ms/cmd.Version=$VERSION" -a -installsuffix cgo -o dist/2ms main.go
- zip -j dist/macos-arm64.zip dist/2ms
- rm dist/2ms
-
- - name: Compile for Windows amd64
- env:
- VERSION: ${{ needs.test.outputs.version }}
- CGO_ENABLED: 0
- GOOS: windows
- GOARCH: amd64
- run: |
- go build -ldflags "-s -w -X github.com/checkmarx/2ms/cmd.Version=$VERSION" -a -installsuffix cgo -o dist/2ms.exe main.go
- zip -j dist/windows-amd64.zip dist/2ms.exe
- rm dist/2ms.exe
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee # v2
- with:
- image: tonistiigi/binfmt:latest
- platforms: linux/amd64,linux/arm64
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb #v3.3.0
-
- - name: Login to DockerHub
- uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20 #v3.1.0
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Creating Release
- uses: softprops/action-gh-release@975c1b265e11dd76618af1c374e7981f9a6ff44a
- with:
- tag_name: ${{ needs.test.outputs.git_tag }}
- name: ${{ needs.test.outputs.git_tag }}
- body: ${{ needs.test.outputs.notes }}
- target_commitish: ${{ steps.commit_and_push.outputs.latest_commit_hash }}
- files: |
- dist/*.zip
-
- - name: Build and push
- uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # v5.3.0
- with:
- context: .
- platforms: linux/amd64,linux/arm64
- push: true
- tags: |
- checkmarx/2ms:latest
- checkmarx/2ms:${{ needs.test.outputs.version }}
-
- - name: Update Docker repo description
- uses: peter-evans/dockerhub-description@e98e4d1628a5f3be2be7c231e50981aee98723ae # v4.0.0
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- repository: checkmarx/2ms
+name: Release
+
+on:
+ workflow_dispatch:
+ push:
+ branches: [master]
+
+jobs:
+ test:
+ name: Test
+ runs-on: ubuntu-latest
+
+ outputs:
+ git_tag: ${{ steps.semantic_release_info.outputs.git_tag }}
+ version: ${{ steps.semantic_release_info.outputs.version }}
+ notes: ${{ steps.semantic_release_info.outputs.notes }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ fetch-depth: 0 # Required for 2ms to have visibility to all commit history
+
+ - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
+ with:
+ go-version: "^1.22"
+ - name: Go Linter
+ run: docker run --rm -v $(pwd):/app -w /app golangci/golangci-lint:v1.61.0 golangci-lint run -v -E gofmt --timeout=5m
+
+ - name: Unit Tests
+ run: go test ./...
+
+ - name: Gets release info
+ id: semantic_release_info
+ if: github.event_name == 'workflow_dispatch'
+ uses: jossef/action-semantic-release-info@277fc891fc5ac40ed0e8d6bf59a0e24a25dfdeac #v3.0.0
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ build:
+ name: Build and Release
+ runs-on: ubuntu-latest
+ needs: test
+ if: ${{ needs.test.outputs.git_tag }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+
+ - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
+ with:
+ go-version: "^1.22"
+
+ - name: Go Mod Tidy
+ run: go mod tidy
+
+ - name: Compile for Linux amd64
+ env:
+ VERSION: ${{ needs.test.outputs.version }}
+ CGO_ENABLED: 0
+ GOOS: linux
+ GOARCH: amd64
+ run: |
+ go build -ldflags "-s -w -X github.com/checkmarx/2ms/cmd.Version=$VERSION" -a -installsuffix cgo -o dist/2ms main.go
+ zip -j dist/linux-amd64.zip dist/2ms
+ rm dist/2ms
+
+ - name: Compile for MacOS amd64
+ env:
+ VERSION: ${{ needs.test.outputs.version }}
+ CGO_ENABLED: 0
+ GOOS: darwin
+ GOARCH: amd64
+ run: |
+ go build -ldflags "-s -w -X github.com/checkmarx/2ms/cmd.Version=$VERSION" -a -installsuffix cgo -o dist/2ms main.go
+ zip -j dist/macos-amd64.zip dist/2ms
+ rm dist/2ms
+
+ - name: Compile for MacOS arm64
+ env:
+ VERSION: ${{ needs.test.outputs.version }}
+ CGO_ENABLED: 0
+ GOOS: darwin
+ GOARCH: arm64
+ run: |
+ go build -ldflags "-s -w -X github.com/checkmarx/2ms/cmd.Version=$VERSION" -a -installsuffix cgo -o dist/2ms main.go
+ zip -j dist/macos-arm64.zip dist/2ms
+ rm dist/2ms
+
+ - name: Compile for Windows amd64
+ env:
+ VERSION: ${{ needs.test.outputs.version }}
+ CGO_ENABLED: 0
+ GOOS: windows
+ GOARCH: amd64
+ run: |
+ go build -ldflags "-s -w -X github.com/checkmarx/2ms/cmd.Version=$VERSION" -a -installsuffix cgo -o dist/2ms.exe main.go
+ zip -j dist/windows-amd64.zip dist/2ms.exe
+ rm dist/2ms.exe
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee # v2
+ with:
+ image: tonistiigi/binfmt:latest
+ platforms: linux/amd64,linux/arm64
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb #v3.3.0
+
+ - name: Login to DockerHub
+ uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20 #v3.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Creating Release
+ uses: softprops/action-gh-release@975c1b265e11dd76618af1c374e7981f9a6ff44a
+ with:
+ tag_name: ${{ needs.test.outputs.git_tag }}
+ name: ${{ needs.test.outputs.git_tag }}
+ body: ${{ needs.test.outputs.notes }}
+ target_commitish: ${{ steps.commit_and_push.outputs.latest_commit_hash }}
+ files: |
+ dist/*.zip
+
+ - name: Build and push
+ uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # v5.3.0
+ with:
+ context: .
+ platforms: linux/amd64,linux/arm64
+ push: true
+ tags: |
+ checkmarx/2ms:latest
+ checkmarx/2ms:${{ needs.test.outputs.version }}
+
+ - name: Update Docker repo description
+ uses: peter-evans/dockerhub-description@e98e4d1628a5f3be2be7c231e50981aee98723ae # v4.0.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ repository: checkmarx/2ms
diff --git a/.github/workflows/validate-readme.yml b/.github/workflows/validate-readme.yml
index dfd36871..316593a5 100644
--- a/.github/workflows/validate-readme.yml
+++ b/.github/workflows/validate-readme.yml
@@ -1,30 +1,30 @@
-name: Validate README
-
-on:
- pull_request:
- branches:
- - master
- merge_group:
-
-jobs:
- validate:
- name: README should be updated
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
- with:
- go-version: "^1.22"
-
- - name: update README
- run: ./.ci/update-readme.sh
- - name: validate README wasn't updated
- run: |
- if ! git diff-index --quiet HEAD; then
- # Find the line numbers of the start and end markers
-          start_line=$(grep -n '<!-- command-line:start -->' README.md | cut -d ":" -f 1)
-          end_line=$(grep -n '<!-- command-line:end -->' README.md | cut -d ":" -f 1)
-
- echo "::error file=README.md,title=Outdated README,line=$start_line,endLine=$end_line::README.md is outdated, please run ./.ci/update-readme.sh"
- exit 1
- fi
+name: Validate README
+
+on:
+ pull_request:
+ branches:
+ - master
+ merge_group:
+
+jobs:
+ validate:
+ name: README should be updated
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
+ with:
+ go-version: "^1.22"
+
+ - name: update README
+ run: ./.ci/update-readme.sh
+ - name: validate README wasn't updated
+ run: |
+ if ! git diff-index --quiet HEAD; then
+ # Find the line numbers of the start and end markers
+          start_line=$(grep -n '<!-- command-line:start -->' README.md | cut -d ":" -f 1)
+          end_line=$(grep -n '<!-- command-line:end -->' README.md | cut -d ":" -f 1)
+
+ echo "::error file=README.md,title=Outdated README,line=$start_line,endLine=$end_line::README.md is outdated, please run ./.ci/update-readme.sh"
+ exit 1
+ fi
diff --git a/.gitignore b/.gitignore
index 2f407938..c1970de6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,21 +1,21 @@
-# Binaries for programs and plugins
-*.exe
-*.exe~
-*.dll
-*.so
-*.dylib
-
-# Test binary, built with `go test -c`
-*.test
-.idea
-# Output of the go coverage tool, specifically when used with LiteIDE
-*.out
-
-# Dependency directories (remove the comment below to include it)
-vendor/
-
-# IDE directories and files
-.vscode/
-
-dist
-2ms
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+.idea
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Dependency directories (remove the comment below to include it)
+vendor/
+
+# IDE directories and files
+.vscode/
+
+dist
+2ms
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6f440b49..6f13d462 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,14 +1,14 @@
-# Welcome to the 2ms club!
-
-> [!NOTE]
-> This is the first version of the document, we will rewrite it on the fly.
-
-## Test
-
-Along with the regular unit tests, we also have a set of other tests:
-
-- `tests/cli` - e2e tests that build the CLI, run it, and check the output.
- To skip these tests, run `go test -short ./...`.
-- `tests/lint` - linter, to verify we are not using our forbidden functions (for example, using `fmt.Print` instead of `log.Info`)
-- `.ci/check_new_rules.go` - compares the list of rules in the [latest _gitleaks_ release](https://github.com/gitleaks/gitleaks/releases/latest) with our list of rules, and fails if there are rules in the release that are not in our list.
-- `.ci/update-readme.sh` - auto update the `help` message in the [README.md](README.md#command-line-interface) file.
+# Welcome to the 2ms club!
+
+> [!NOTE]
+> This is the first version of the document, we will rewrite it on the fly.
+
+## Test
+
+Along with the regular unit tests, we also have a set of other tests:
+
+- `tests/cli` - e2e tests that build the CLI, run it, and check the output.
+ To skip these tests, run `go test -short ./...`.
+- `tests/lint` - linter, to verify we are not using our forbidden functions (for example, using `fmt.Print` instead of `log.Info`)
+- `.ci/check_new_rules.go` - compares the list of rules in the [latest _gitleaks_ release](https://github.com/gitleaks/gitleaks/releases/latest) with our list of rules, and fails if there are rules in the release that are not in our list.
+- `.ci/update-readme.sh` - auto update the `help` message in the [README.md](README.md#command-line-interface) file.
diff --git a/LICENSE b/LICENSE
index 5502cdae..24649793 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,201 +1,201 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [2023] [Checkmarx Ltd.]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [2023] [Checkmarx Ltd.]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Makefile b/Makefile
index 4df81607..e49ba1a9 100644
--- a/Makefile
+++ b/Makefile
@@ -1,18 +1,18 @@
-image_label ?= latest
-image_name ?= checkmarx/2ms:$(image_label)
-image_file_name ?= checkmarx-2ms-$(image_label).tar
-
-build:
- docker build -t $(image_name) .
-
-save: build
- docker save $(image_name) > $(image_file_name)
-
-run:
- docker run -it $(image_name) $(ARGS)
-
-# To run golangci-lint, you need to install it first: https://golangci-lint.run/usage/install/#local-installation
-lint:
- golangci-lint run -v -E gofmt --timeout=5m
-lint-fix:
+image_label ?= latest
+image_name ?= checkmarx/2ms:$(image_label)
+image_file_name ?= checkmarx-2ms-$(image_label).tar
+
+build:
+ docker build -t $(image_name) .
+
+save: build
+ docker save $(image_name) > $(image_file_name)
+
+run:
+ docker run -it $(image_name) $(ARGS)
+
+# To run golangci-lint, you need to install it first: https://golangci-lint.run/usage/install/#local-installation
+lint:
+ golangci-lint run -v -E gofmt --timeout=5m
+lint-fix:
golangci-lint run -v -E gofmt --fix --timeout=5m
\ No newline at end of file
diff --git a/cmd/config_test.go b/cmd/config_test.go
index adbf665b..9c5c0aac 100644
--- a/cmd/config_test.go
+++ b/cmd/config_test.go
@@ -1,103 +1,103 @@
-package cmd
-
-import (
- "fmt"
- "github.com/rs/zerolog"
- "github.com/rs/zerolog/log"
- "github.com/spf13/cobra"
- "github.com/stretchr/testify/assert"
- "testing"
-)
-
-func TestValidateFormat(t *testing.T) {
- tests := []struct {
- name string
- stdoutFormatVar string
- reportPath []string
- expectedErr error
- }{
- {
- name: "valid output format and report extension json",
- stdoutFormatVar: "json",
- reportPath: []string{"report.json"},
- expectedErr: nil,
- },
- {
- name: "valid output format and report extension yaml",
- stdoutFormatVar: "yaml",
- reportPath: []string{"report.yaml"},
- expectedErr: nil,
- },
- {
- name: "valid output format and report extension sarif",
- stdoutFormatVar: "sarif",
- reportPath: []string{"report.sarif"},
- expectedErr: nil,
- },
- {
- name: "invalid output format",
- stdoutFormatVar: "invalid",
- reportPath: []string{"report.json"},
- expectedErr: fmt.Errorf("invalid output format: invalid, available formats are: json, yaml and sarif"),
- },
- {
- name: "invalid report extension",
- stdoutFormatVar: "json",
- reportPath: []string{"report.invalid"},
- expectedErr: fmt.Errorf("invalid report extension: invalid, available extensions are: json, yaml and sarif"),
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- err := validateFormat(tt.stdoutFormatVar, tt.reportPath)
- assert.Equal(t, tt.expectedErr, err)
- })
- }
-}
-
-func TestInitializeLogLevels(t *testing.T) {
- testCases := []struct {
- name string
- logLevelInput string
- expectedLevel zerolog.Level
- }{
- {"Trace Level", "trace", zerolog.TraceLevel},
- {"Debug Level", "debug", zerolog.DebugLevel},
- {"Info Level", "info", zerolog.InfoLevel},
- {"Warn Level", "warn", zerolog.WarnLevel},
- {"Error Level with 'error'", "error", zerolog.ErrorLevel},
- {"Error Level with 'err'", "err", zerolog.ErrorLevel},
- {"Fatal Level", "fatal", zerolog.FatalLevel},
- {"Invalid Level Defaults to Info", "invalid", zerolog.InfoLevel},
- {"Empty Level Defaults to Info", "", zerolog.InfoLevel},
- }
-
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- originalRootCmd := rootCmd
- defer func() { rootCmd = originalRootCmd }()
- rootCmd = &cobra.Command{
- Use: "test",
- Run: func(cmd *cobra.Command, args []string) {
- cmd.Flags().StringVar(&configFilePath, configFileFlag, "", "")
- cmd.Flags().StringVar(&logLevelVar, logLevelFlagName, "", "")
-
- err := cmd.Flags().Set(configFileFlag, "")
- assert.NoError(t, err)
-
- err = cmd.Flags().Set(logLevelFlagName, tc.logLevelInput)
- assert.NoError(t, err)
-
- initialize()
-
- assert.Equal(t, tc.expectedLevel, zerolog.GlobalLevel())
- assert.Equal(t, tc.expectedLevel, log.Logger.GetLevel())
- },
- }
-
- err := rootCmd.Execute()
- assert.NoError(t, err, "Error executing command")
- })
- }
-}
+package cmd
+
+import (
+ "fmt"
+ "github.com/rs/zerolog"
+ "github.com/rs/zerolog/log"
+ "github.com/spf13/cobra"
+ "github.com/stretchr/testify/assert"
+ "testing"
+)
+
+func TestValidateFormat(t *testing.T) {
+ tests := []struct {
+ name string
+ stdoutFormatVar string
+ reportPath []string
+ expectedErr error
+ }{
+ {
+ name: "valid output format and report extension json",
+ stdoutFormatVar: "json",
+ reportPath: []string{"report.json"},
+ expectedErr: nil,
+ },
+ {
+ name: "valid output format and report extension yaml",
+ stdoutFormatVar: "yaml",
+ reportPath: []string{"report.yaml"},
+ expectedErr: nil,
+ },
+ {
+ name: "valid output format and report extension sarif",
+ stdoutFormatVar: "sarif",
+ reportPath: []string{"report.sarif"},
+ expectedErr: nil,
+ },
+ {
+ name: "invalid output format",
+ stdoutFormatVar: "invalid",
+ reportPath: []string{"report.json"},
+ expectedErr: fmt.Errorf("invalid output format: invalid, available formats are: json, yaml and sarif"),
+ },
+ {
+ name: "invalid report extension",
+ stdoutFormatVar: "json",
+ reportPath: []string{"report.invalid"},
+ expectedErr: fmt.Errorf("invalid report extension: invalid, available extensions are: json, yaml and sarif"),
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ err := validateFormat(tt.stdoutFormatVar, tt.reportPath)
+ assert.Equal(t, tt.expectedErr, err)
+ })
+ }
+}
+
+func TestInitializeLogLevels(t *testing.T) {
+ testCases := []struct {
+ name string
+ logLevelInput string
+ expectedLevel zerolog.Level
+ }{
+ {"Trace Level", "trace", zerolog.TraceLevel},
+ {"Debug Level", "debug", zerolog.DebugLevel},
+ {"Info Level", "info", zerolog.InfoLevel},
+ {"Warn Level", "warn", zerolog.WarnLevel},
+ {"Error Level with 'error'", "error", zerolog.ErrorLevel},
+ {"Error Level with 'err'", "err", zerolog.ErrorLevel},
+ {"Fatal Level", "fatal", zerolog.FatalLevel},
+ {"Invalid Level Defaults to Info", "invalid", zerolog.InfoLevel},
+ {"Empty Level Defaults to Info", "", zerolog.InfoLevel},
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.name, func(t *testing.T) {
+ originalRootCmd := rootCmd
+ defer func() { rootCmd = originalRootCmd }()
+ rootCmd = &cobra.Command{
+ Use: "test",
+ Run: func(cmd *cobra.Command, args []string) {
+ cmd.Flags().StringVar(&configFilePath, configFileFlag, "", "")
+ cmd.Flags().StringVar(&logLevelVar, logLevelFlagName, "", "")
+
+ err := cmd.Flags().Set(configFileFlag, "")
+ assert.NoError(t, err)
+
+ err = cmd.Flags().Set(logLevelFlagName, tc.logLevelInput)
+ assert.NoError(t, err)
+
+ initialize()
+
+ assert.Equal(t, tc.expectedLevel, zerolog.GlobalLevel())
+ assert.Equal(t, tc.expectedLevel, log.Logger.GetLevel())
+ },
+ }
+
+ err := rootCmd.Execute()
+ assert.NoError(t, err, "Error executing command")
+ })
+ }
+}
diff --git a/cmd/enum_flags.go b/cmd/enum_flags.go
index 34fad7d8..faa13ca3 100644
--- a/cmd/enum_flags.go
+++ b/cmd/enum_flags.go
@@ -1,37 +1,37 @@
-package cmd
-
-import (
- "flag"
- "fmt"
-)
-
-type ignoreOnExit string
-
-const (
- ignoreOnExitNone ignoreOnExit = "none"
- ignoreOnExitAll ignoreOnExit = "all"
- ignoreOnExitResults ignoreOnExit = "results"
- ignoreOnExitErrors ignoreOnExit = "errors"
-)
-
-// verify that ignoreOnExit implements flag.Value interface
-// https://github.com/uber-go/guide/blob/master/style.md#verify-interface-compliance
-var _ flag.Value = (*ignoreOnExit)(nil)
-
-func (i *ignoreOnExit) String() string {
- return string(*i)
-}
-
-func (i *ignoreOnExit) Set(value string) error {
- switch value {
- case "none", "all", "results", "errors":
- *i = ignoreOnExit(value)
- return nil
- default:
- return fmt.Errorf("invalid value %s", value)
- }
-}
-
-func (i *ignoreOnExit) Type() string {
- return "ignoreOnExit"
-}
+package cmd
+
+import (
+ "flag"
+ "fmt"
+)
+
+type ignoreOnExit string
+
+const (
+ ignoreOnExitNone ignoreOnExit = "none"
+ ignoreOnExitAll ignoreOnExit = "all"
+ ignoreOnExitResults ignoreOnExit = "results"
+ ignoreOnExitErrors ignoreOnExit = "errors"
+)
+
+// verify that ignoreOnExit implements flag.Value interface
+// https://github.com/uber-go/guide/blob/master/style.md#verify-interface-compliance
+var _ flag.Value = (*ignoreOnExit)(nil)
+
+func (i *ignoreOnExit) String() string {
+ return string(*i)
+}
+
+func (i *ignoreOnExit) Set(value string) error {
+ switch value {
+ case "none", "all", "results", "errors":
+ *i = ignoreOnExit(value)
+ return nil
+ default:
+ return fmt.Errorf("invalid value %s", value)
+ }
+}
+
+func (i *ignoreOnExit) Type() string {
+ return "ignoreOnExit"
+}
diff --git a/cmd/enum_flags_test.go b/cmd/enum_flags_test.go
index f07b13d7..82f1947b 100644
--- a/cmd/enum_flags_test.go
+++ b/cmd/enum_flags_test.go
@@ -1,34 +1,34 @@
-package cmd
-
-import (
- "fmt"
- "github.com/stretchr/testify/assert"
- "testing"
-)
-
-func TestIgnoreOnExitSet(t *testing.T) {
- tests := []struct {
- input string
- expected ignoreOnExit
- err bool
- }{
- {"none", ignoreOnExitNone, false},
- {"all", ignoreOnExitAll, false},
- {"results", ignoreOnExitResults, false},
- {"errors", ignoreOnExitErrors, false},
- {"invalid", "", true},
- }
-
- for _, tt := range tests {
- t.Run(fmt.Sprintf("Set(%s)", tt.input), func(t *testing.T) {
- var i ignoreOnExit
- err := i.Set(tt.input)
- if tt.err {
- assert.Error(t, err)
- } else {
- assert.NoError(t, err)
- assert.Equal(t, tt.expected, i)
- }
- })
- }
-}
+package cmd
+
+import (
+ "fmt"
+ "github.com/stretchr/testify/assert"
+ "testing"
+)
+
+func TestIgnoreOnExitSet(t *testing.T) {
+ tests := []struct {
+ input string
+ expected ignoreOnExit
+ err bool
+ }{
+ {"none", ignoreOnExitNone, false},
+ {"all", ignoreOnExitAll, false},
+ {"results", ignoreOnExitResults, false},
+ {"errors", ignoreOnExitErrors, false},
+ {"invalid", "", true},
+ }
+
+ for _, tt := range tests {
+ t.Run(fmt.Sprintf("Set(%s)", tt.input), func(t *testing.T) {
+ var i ignoreOnExit
+ err := i.Set(tt.input)
+ if tt.err {
+ assert.Error(t, err)
+ } else {
+ assert.NoError(t, err)
+ assert.Equal(t, tt.expected, i)
+ }
+ })
+ }
+}
diff --git a/cmd/exit_handler.go b/cmd/exit_handler.go
index 0e116562..4e4cfa28 100644
--- a/cmd/exit_handler.go
+++ b/cmd/exit_handler.go
@@ -1,55 +1,55 @@
-package cmd
-
-import (
- "github.com/rs/zerolog/log"
- "os"
-)
-
-const (
- errorCode = 1
- resultsCode = 2
-)
-
-func isNeedReturnErrorCodeFor(kind ignoreOnExit) bool {
- if ignoreOnExitVar == ignoreOnExitNone {
- return true
- }
-
- if ignoreOnExitVar == ignoreOnExitAll {
- return false
- }
-
- if ignoreOnExitVar != ignoreOnExit(kind) {
- return true
- }
-
- return false
-}
-
-func exitCodeIfError(err error) int {
- if err != nil && isNeedReturnErrorCodeFor("errors") {
- log.Error().Err(err).Msg("Failed to run 2ms")
- return errorCode
- }
-
- return 0
-}
-
-func exitCodeIfResults(resultsCount int) int {
- if resultsCount > 0 && isNeedReturnErrorCodeFor("results") {
- return resultsCode
- }
-
- return 0
-}
-
-func Exit(resultsCount int, err error) {
- os.Exit(exitCodeIfError(err) + exitCodeIfResults(resultsCount))
-}
-
-func listenForErrors(errors chan error) {
- go func() {
- err := <-errors
- Exit(0, err)
- }()
-}
+package cmd
+
+import (
+ "github.com/rs/zerolog/log"
+ "os"
+)
+
+const (
+ errorCode = 1
+ resultsCode = 2
+)
+
+func isNeedReturnErrorCodeFor(kind ignoreOnExit) bool {
+ if ignoreOnExitVar == ignoreOnExitNone {
+ return true
+ }
+
+ if ignoreOnExitVar == ignoreOnExitAll {
+ return false
+ }
+
+ if ignoreOnExitVar != ignoreOnExit(kind) {
+ return true
+ }
+
+ return false
+}
+
+func exitCodeIfError(err error) int {
+ if err != nil && isNeedReturnErrorCodeFor("errors") {
+ log.Error().Err(err).Msg("Failed to run 2ms")
+ return errorCode
+ }
+
+ return 0
+}
+
+func exitCodeIfResults(resultsCount int) int {
+ if resultsCount > 0 && isNeedReturnErrorCodeFor("results") {
+ return resultsCode
+ }
+
+ return 0
+}
+
+func Exit(resultsCount int, err error) {
+ os.Exit(exitCodeIfError(err) + exitCodeIfResults(resultsCount))
+}
+
+func listenForErrors(errors chan error) {
+ go func() {
+ err := <-errors
+ Exit(0, err)
+ }()
+}
diff --git a/cmd/exit_handler_test.go b/cmd/exit_handler_test.go
index b7502386..dc0a1e4a 100644
--- a/cmd/exit_handler_test.go
+++ b/cmd/exit_handler_test.go
@@ -1,146 +1,146 @@
-package cmd
-
-import (
- "fmt"
- "github.com/stretchr/testify/assert"
- "testing"
-)
-
-func TestExitHandler_IsNeedReturnErrorCode(t *testing.T) {
-
- var onErrorsTests = []struct {
- userInput ignoreOnExit
- expectedResult bool
- }{
- {
- userInput: ignoreOnExitNone,
- expectedResult: true,
- },
- {
- userInput: ignoreOnExitAll,
- expectedResult: false,
- },
- {
- userInput: ignoreOnExitResults,
- expectedResult: true,
- },
- {
- userInput: ignoreOnExitErrors,
- expectedResult: false,
- },
- }
-
- for idx, testCase := range onErrorsTests {
- t.Run(fmt.Sprintf("Print test case %d", idx), func(t *testing.T) {
- ignoreOnExitVar = testCase.userInput
- result := isNeedReturnErrorCodeFor("errors")
- if result != testCase.expectedResult {
- t.Errorf("Expected %v, got %v", testCase.expectedResult, result)
- }
- })
- }
-
- var onResultsTests = []struct {
- userInput ignoreOnExit
- expectedResult bool
- }{
- {
- userInput: ignoreOnExitNone,
- expectedResult: true,
- },
- {
- userInput: ignoreOnExitAll,
- expectedResult: false,
- },
- {
- userInput: ignoreOnExitResults,
- expectedResult: false,
- },
- {
- userInput: ignoreOnExitErrors,
- expectedResult: true,
- },
- }
-
- for idx, testCase := range onResultsTests {
- t.Run(fmt.Sprintf("Print test case %d", idx), func(t *testing.T) {
- ignoreOnExitVar = testCase.userInput
- result := isNeedReturnErrorCodeFor("results")
- if result != testCase.expectedResult {
- t.Errorf("Expected %v, got %v", testCase.expectedResult, result)
- }
- })
- }
-}
-
-func TestExitCodeIfError(t *testing.T) {
- testCases := []struct {
- name string
- err error
- ignoreOnExit ignoreOnExit
- expectedCode int
- }{
- {
- name: "No error, ignoreOnExitNone",
- err: nil,
- ignoreOnExit: ignoreOnExitNone,
- expectedCode: 0,
- },
- {
- name: "Error present, ignoreOnExitNone",
- err: fmt.Errorf("sample error"),
- ignoreOnExit: ignoreOnExitNone,
- expectedCode: errorCode,
- },
- {
- name: "Error present, ignoreOnExitAll",
- err: fmt.Errorf("sample error"),
- ignoreOnExit: ignoreOnExitAll,
- expectedCode: 0,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- ignoreOnExitVar = tc.ignoreOnExit
- code := exitCodeIfError(tc.err)
- assert.Equal(t, tc.expectedCode, code)
- })
- }
-}
-
-func TestExitCodeIfResults(t *testing.T) {
- testCases := []struct {
- name string
- resultsCount int
- ignoreOnExit ignoreOnExit
- expectedCode int
- }{
- {
- name: "No results, ignoreOnExitNone",
- resultsCount: 0,
- ignoreOnExit: ignoreOnExitNone,
- expectedCode: 0,
- },
- {
- name: "Results present, ignoreOnExitNone",
- resultsCount: 5,
- ignoreOnExit: ignoreOnExitNone,
- expectedCode: resultsCode,
- },
- {
- name: "Results present, ignoreOnExitAll",
- resultsCount: 5,
- ignoreOnExit: ignoreOnExitAll,
- expectedCode: 0,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- ignoreOnExitVar = tc.ignoreOnExit
- code := exitCodeIfResults(tc.resultsCount)
- assert.Equal(t, tc.expectedCode, code)
- })
- }
-}
+package cmd
+
+import (
+ "fmt"
+ "github.com/stretchr/testify/assert"
+ "testing"
+)
+
+func TestExitHandler_IsNeedReturnErrorCode(t *testing.T) {
+
+ var onErrorsTests = []struct {
+ userInput ignoreOnExit
+ expectedResult bool
+ }{
+ {
+ userInput: ignoreOnExitNone,
+ expectedResult: true,
+ },
+ {
+ userInput: ignoreOnExitAll,
+ expectedResult: false,
+ },
+ {
+ userInput: ignoreOnExitResults,
+ expectedResult: true,
+ },
+ {
+ userInput: ignoreOnExitErrors,
+ expectedResult: false,
+ },
+ }
+
+ for idx, testCase := range onErrorsTests {
+ t.Run(fmt.Sprintf("Print test case %d", idx), func(t *testing.T) {
+ ignoreOnExitVar = testCase.userInput
+ result := isNeedReturnErrorCodeFor("errors")
+ if result != testCase.expectedResult {
+ t.Errorf("Expected %v, got %v", testCase.expectedResult, result)
+ }
+ })
+ }
+
+ var onResultsTests = []struct {
+ userInput ignoreOnExit
+ expectedResult bool
+ }{
+ {
+ userInput: ignoreOnExitNone,
+ expectedResult: true,
+ },
+ {
+ userInput: ignoreOnExitAll,
+ expectedResult: false,
+ },
+ {
+ userInput: ignoreOnExitResults,
+ expectedResult: false,
+ },
+ {
+ userInput: ignoreOnExitErrors,
+ expectedResult: true,
+ },
+ }
+
+ for idx, testCase := range onResultsTests {
+ t.Run(fmt.Sprintf("Print test case %d", idx), func(t *testing.T) {
+ ignoreOnExitVar = testCase.userInput
+ result := isNeedReturnErrorCodeFor("results")
+ if result != testCase.expectedResult {
+ t.Errorf("Expected %v, got %v", testCase.expectedResult, result)
+ }
+ })
+ }
+}
+
+func TestExitCodeIfError(t *testing.T) {
+ testCases := []struct {
+ name string
+ err error
+ ignoreOnExit ignoreOnExit
+ expectedCode int
+ }{
+ {
+ name: "No error, ignoreOnExitNone",
+ err: nil,
+ ignoreOnExit: ignoreOnExitNone,
+ expectedCode: 0,
+ },
+ {
+ name: "Error present, ignoreOnExitNone",
+ err: fmt.Errorf("sample error"),
+ ignoreOnExit: ignoreOnExitNone,
+ expectedCode: errorCode,
+ },
+ {
+ name: "Error present, ignoreOnExitAll",
+ err: fmt.Errorf("sample error"),
+ ignoreOnExit: ignoreOnExitAll,
+ expectedCode: 0,
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.name, func(t *testing.T) {
+ ignoreOnExitVar = tc.ignoreOnExit
+ code := exitCodeIfError(tc.err)
+ assert.Equal(t, tc.expectedCode, code)
+ })
+ }
+}
+
+func TestExitCodeIfResults(t *testing.T) {
+ testCases := []struct {
+ name string
+ resultsCount int
+ ignoreOnExit ignoreOnExit
+ expectedCode int
+ }{
+ {
+ name: "No results, ignoreOnExitNone",
+ resultsCount: 0,
+ ignoreOnExit: ignoreOnExitNone,
+ expectedCode: 0,
+ },
+ {
+ name: "Results present, ignoreOnExitNone",
+ resultsCount: 5,
+ ignoreOnExit: ignoreOnExitNone,
+ expectedCode: resultsCode,
+ },
+ {
+ name: "Results present, ignoreOnExitAll",
+ resultsCount: 5,
+ ignoreOnExit: ignoreOnExitAll,
+ expectedCode: 0,
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.name, func(t *testing.T) {
+ ignoreOnExitVar = tc.ignoreOnExit
+ code := exitCodeIfResults(tc.resultsCount)
+ assert.Equal(t, tc.expectedCode, code)
+ })
+ }
+}
diff --git a/cmd/workers.go b/cmd/workers.go
index 497e8c5d..1c5922ca 100644
--- a/cmd/workers.go
+++ b/cmd/workers.go
@@ -1,23 +1,24 @@
package cmd
import (
- "github.com/checkmarx/2ms/lib/secrets"
- "sync"
-
"github.com/checkmarx/2ms/engine"
"github.com/checkmarx/2ms/engine/extra"
+ "golang.org/x/sync/errgroup"
)
func processItems(engine *engine.Engine, pluginName string) {
defer channels.WaitGroup.Done()
- wgItems := &sync.WaitGroup{}
+ g := errgroup.Group{}
+ g.SetLimit(1000)
for item := range channels.Items {
report.TotalItemsScanned++
- wgItems.Add(1)
- go engine.Detect(item, secretsChan, wgItems, pluginName, channels.Errors)
+ g.Go(func() error {
+ engine.Detect(item, secretsChan, pluginName, channels.Errors)
+ return nil
+ })
}
- wgItems.Wait()
+ g.Wait()
close(secretsChan)
}
@@ -42,37 +43,43 @@ func processSecrets() {
func processSecretsExtras() {
defer channels.WaitGroup.Done()
- wgExtras := &sync.WaitGroup{}
+ g := errgroup.Group{}
+ g.SetLimit(10)
for secret := range secretsExtrasChan {
- wgExtras.Add(1)
- go extra.AddExtraToSecret(secret, wgExtras)
+ g.Go(func() error {
+ extra.AddExtraToSecret(secret)
+ return nil
+ })
}
- wgExtras.Wait()
+ g.Wait()
}
func processValidationAndScoreWithValidation(engine *engine.Engine) {
defer channels.WaitGroup.Done()
- wgValidation := &sync.WaitGroup{}
+ g := errgroup.Group{}
+ g.SetLimit(10)
for secret := range validationChan {
- wgValidation.Add(2)
- go func(secret *secrets.Secret, wg *sync.WaitGroup) {
- engine.RegisterForValidation(secret, wg)
- engine.Score(secret, true, wg)
- }(secret, wgValidation)
+ g.Go(func() error {
+ engine.RegisterForValidation(secret)
+ engine.Score(secret, true)
+ return nil
+ })
}
- wgValidation.Wait()
-
+ g.Wait()
engine.Validate()
}
func processScoreWithoutValidation(engine *engine.Engine) {
defer channels.WaitGroup.Done()
- wgScore := &sync.WaitGroup{}
+ g := errgroup.Group{}
+ g.SetLimit(10)
for secret := range cvssScoreWithoutValidationChan {
- wgScore.Add(1)
- go engine.Score(secret, false, wgScore)
+ g.Go(func() error {
+ engine.Score(secret, false)
+ return nil
+ })
}
- wgScore.Wait()
+ g.Wait()
}
diff --git a/docs/list-of-rules.md b/docs/list-of-rules.md
index 7d041eb7..dc4539b1 100644
--- a/docs/list-of-rules.md
+++ b/docs/list-of-rules.md
@@ -1,171 +1,171 @@
-# Rules
-
-Here is a complete list of all the rules that are currently implemented.
-
-
-| Name | Description | Tags | Validity Check |
-| ---- | ---- | ---- | ---- |
-| adafruit-api-key | Identified a potential Adafruit API Key, which could lead to unauthorized access to Adafruit services and sensitive data exposure. | api-key | |
-| adobe-client-id | Detected a pattern that resembles an Adobe OAuth Web Client ID, posing a risk of compromised Adobe integrations and data breaches. | client-id | |
-| adobe-client-secret | Discovered a potential Adobe Client Secret, which, if exposed, could allow unauthorized Adobe service access and data manipulation. | client-secret | |
-| age secret key | Discovered a potential Age encryption tool secret key, risking data decryption and unauthorized access to sensitive information. | secret-key | |
-| airtable-api-key | Uncovered a possible Airtable API Key, potentially compromising database access and leading to data leakage or alteration. | api-key | |
-| algolia-api-key | Identified an Algolia API Key, which could result in unauthorized search operations and data exposure on Algolia-managed platforms. | api-key | |
-| alibaba-access-key-id | Detected an Alibaba Cloud AccessKey ID, posing a risk of unauthorized cloud resource access and potential data compromise. | access-key,access-id | V |
-| alibaba-secret-key | Discovered a potential Alibaba Cloud Secret Key, potentially allowing unauthorized operations and data access within Alibaba Cloud. | secret-key | V |
-| asana-client-id | Discovered a potential Asana Client ID, risking unauthorized access to Asana projects and sensitive task information. | client-id | |
-| asana-client-secret | Identified an Asana Client Secret, which could lead to compromised project management integrity and unauthorized access. | client-secret | |
-| atlassian-api-token | Detected an Atlassian API token, posing a threat to project management and collaboration tool security and data confidentiality. | api-token | |
-| authress-service-client-access-key | Uncovered a possible Authress Service Client Access Key, which may compromise access control services and sensitive data. | access-token | |
-| aws-access-token | Identified a pattern that may indicate AWS credentials, risking unauthorized cloud resource access and data breaches on AWS platforms. | access-token | |
-| bitbucket-client-id | Discovered a potential Bitbucket Client ID, risking unauthorized repository access and potential codebase exposure. | client-id | |
-| bitbucket-client-secret | Discovered a potential Bitbucket Client Secret, posing a risk of compromised code repositories and unauthorized access. | client-secret | |
-| bittrex-access-key | Identified a Bittrex Access Key, which could lead to unauthorized access to cryptocurrency trading accounts and financial loss. | access-key | |
-| bittrex-secret-key | Detected a Bittrex Secret Key, potentially compromising cryptocurrency transactions and financial security. | secret-key | |
-| beamer-api-token | Detected a Beamer API token, potentially compromising content management and exposing sensitive notifications and updates. | api-token | |
-| codecov-access-token | Found a pattern resembling a Codecov Access Token, posing a risk of unauthorized access to code coverage reports and sensitive data. | access-token | |
-| coinbase-access-token | Detected a Coinbase Access Token, posing a risk of unauthorized access to cryptocurrency accounts and financial transactions. | access-token | |
-| clojars-api-token | Uncovered a possible Clojars API token, risking unauthorized access to Clojure libraries and potential code manipulation. | api-token | |
-| confluent-access-token | Identified a Confluent Access Token, which could compromise access to streaming data platforms and sensitive data flow. | access-token | |
-| confluent-secret-key | Found a Confluent Secret Key, potentially risking unauthorized operations and data access within Confluent services. | secret-key | |
-| contentful-delivery-api-token | Discovered a Contentful delivery API token, posing a risk to content management systems and data integrity. | api-token | |
-| databricks-api-token | Uncovered a Databricks API token, which may compromise big data analytics platforms and sensitive data processing. | api-token | |
-| datadog-access-token | Detected a Datadog Access Token, potentially risking monitoring and analytics data exposure and manipulation. | access-token,client-id | |
-| defined-networking-api-token | Identified a Defined Networking API token, which could lead to unauthorized network operations and data breaches. | api-token | |
-| digitalocean-pat | Discovered a DigitalOcean Personal Access Token, posing a threat to cloud infrastructure security and data privacy. | access-token | |
-| digitalocean-access-token | Found a DigitalOcean OAuth Access Token, risking unauthorized cloud resource access and data compromise. | access-token | |
-| digitalocean-refresh-token | Uncovered a DigitalOcean OAuth Refresh Token, which could allow prolonged unauthorized access and resource manipulation. | refresh-token | |
-| discord-api-token | Detected a Discord API key, potentially compromising communication channels and user data privacy on Discord. | api-key,api-token | |
-| discord-client-id | Identified a Discord client ID, which may lead to unauthorized integrations and data exposure in Discord applications. | client-id | |
-| discord-client-secret | Discovered a potential Discord client secret, risking compromised Discord bot integrations and data leaks. | client-secret | |
-| doppler-api-token | Discovered a Doppler API token, posing a risk to environment and secrets management security. | api-token | |
-| dropbox-api-token | Identified a Dropbox API secret, which could lead to unauthorized file access and data breaches in Dropbox storage. | api-token | |
-| dropbox-short-lived-api-token | Discovered a Dropbox short-lived API token, posing a risk of temporary but potentially harmful data access and manipulation. | api-token | |
-| dropbox-long-lived-api-token | Found a Dropbox long-lived API token, risking prolonged unauthorized access to cloud storage and sensitive data. | api-token | |
-| droneci-access-token | Detected a Droneci Access Token, potentially compromising continuous integration and deployment workflows. | access-token | |
-| duffel-api-token | Uncovered a Duffel API token, which may compromise travel platform integrations and sensitive customer data. | api-token | |
-| dynatrace-api-token | Detected a Dynatrace API token, potentially risking application performance monitoring and data exposure. | api-token | |
-| easypost-api-token | Identified an EasyPost API token, which could lead to unauthorized postal and shipment service access and data exposure. | api-token | |
-| easypost-test-api-token | Detected an EasyPost test API token, risking exposure of test environments and potentially sensitive shipment data. | api-token | |
-| etsy-access-token | Found an Etsy Access Token, potentially compromising Etsy shop management and customer data. | access-token | |
-| facebook | Discovered a Facebook Access Token, posing a risk of unauthorized access to Facebook accounts and personal data exposure. | api-token | |
-| fastly-api-token | Uncovered a Fastly API key, which may compromise CDN and edge cloud services, leading to content delivery and security issues. | api-token,api-key | |
-| finicity-client-secret | Identified a Finicity Client Secret, which could lead to compromised financial service integrations and data breaches. | client-secret | |
-| finicity-api-token | Detected a Finicity API token, potentially risking financial data access and unauthorized financial operations. | api-token | |
-| flickr-access-token | Discovered a Flickr Access Token, posing a risk of unauthorized photo management and potential data leakage. | access-token | |
-| finnhub-access-token | Found a Finnhub Access Token, risking unauthorized access to financial market data and analytics. | access-token | |
-| flutterwave-public-key | Detected a Finicity Public Key, potentially exposing public cryptographic operations and integrations. | public-key | |
-| flutterwave-secret-key | Identified a Flutterwave Secret Key, risking unauthorized financial transactions and data breaches. | secret-key | |
-| flutterwave-encryption-key | Uncovered a Flutterwave Encryption Key, which may compromise payment processing and sensitive financial information. | encryption-key | |
-| frameio-api-token | Found a Frame.io API token, potentially compromising video collaboration and project management. | api-token | |
-| freshbooks-access-token | Discovered a Freshbooks Access Token, posing a risk to accounting software access and sensitive financial data exposure. | access-token | |
-| gcp-api-key | Uncovered a GCP API key, which could lead to unauthorized access to Google Cloud services and data breaches. | api-key | V |
-| generic-api-key | Detected a Generic API Key, potentially exposing access to various services and sensitive operations. | api-key | |
-| github-pat | Uncovered a GitHub Personal Access Token, potentially leading to unauthorized repository access and sensitive content exposure. | access-token | V |
-| github-fine-grained-pat | Found a GitHub Fine-Grained Personal Access Token, risking unauthorized repository access and code manipulation. | access-token | V |
-| github-oauth | Discovered a GitHub OAuth Access Token, posing a risk of compromised GitHub account integrations and data leaks. | access-token | |
-| github-app-token | Identified a GitHub App Token, which may compromise GitHub application integrations and source code security. | access-token | |
-| github-refresh-token | Detected a GitHub Refresh Token, which could allow prolonged unauthorized access to GitHub services. | refresh-token | |
-| gitlab-pat | Identified a GitLab Personal Access Token, risking unauthorized access to GitLab repositories and codebase exposure. | access-token | V |
-| gitlab-ptt | Found a GitLab Pipeline Trigger Token, potentially compromising continuous integration workflows and project security. | trigger-token | |
-| gitlab-rrt | Discovered a GitLab Runner Registration Token, posing a risk to CI/CD pipeline integrity and unauthorized access. | registration-token | |
-| gitter-access-token | Uncovered a Gitter Access Token, which may lead to unauthorized access to chat and communication services. | access-token | |
-| gocardless-api-token | Detected a GoCardless API token, potentially risking unauthorized direct debit payment operations and financial data exposure. | api-token | |
-| grafana-api-key | Identified a Grafana API key, which could compromise monitoring dashboards and sensitive data analytics. | api-key | |
-| grafana-cloud-api-token | Found a Grafana cloud API token, risking unauthorized access to cloud-based monitoring services and data exposure. | api-token | |
-| grafana-service-account-token | Discovered a Grafana service account token, posing a risk of compromised monitoring services and data integrity. | access-token | |
-| hashicorp-tf-api-token | Uncovered a HashiCorp Terraform user/org API token, which may lead to unauthorized infrastructure management and security breaches. | api-token | |
-| hashicorp-tf-password | Identified a HashiCorp Terraform password field, risking unauthorized infrastructure configuration and security breaches. | password | |
-| heroku-api-key | Detected a Heroku API Key, potentially compromising cloud application deployments and operational security. | api-key | |
-| hubspot-api-key | Found a HubSpot API Token, posing a risk to CRM data integrity and unauthorized marketing operations. | api-token,api-key | |
-| huggingface-access-token | Discovered a Hugging Face Access token, which could lead to unauthorized access to AI models and sensitive data. | access-token | |
-| huggingface-organization-api-token | Uncovered a Hugging Face Organization API token, potentially compromising AI organization accounts and associated data. | api-token | |
-| infracost-api-token | Detected an Infracost API Token, risking unauthorized access to cloud cost estimation tools and financial data. | api-token | |
-| intercom-api-key | Identified an Intercom API Token, which could compromise customer communication channels and data privacy. | api-token,api-key | |
-| jfrog-api-key | Found a JFrog API Key, posing a risk of unauthorized access to software artifact repositories and build pipelines. | api-key | |
-| jfrog-identity-token | Discovered a JFrog Identity Token, potentially compromising access to JFrog services and sensitive software artifacts. | access-token | |
-| jwt | Uncovered a JSON Web Token, which may lead to unauthorized access to web applications and sensitive user data. | access-token | |
-| jwt-base64 | Detected a Base64-encoded JSON Web Token, posing a risk of exposing encoded authentication and data exchange information. | access-token | |
-| kraken-access-token | Identified a Kraken Access Token, potentially compromising cryptocurrency trading accounts and financial security. | access-token | |
-| kucoin-access-token | Found a Kucoin Access Token, risking unauthorized access to cryptocurrency exchange services and transactions. | access-token | |
-| kucoin-secret-key | Discovered a Kucoin Secret Key, which could lead to compromised cryptocurrency operations and financial data breaches. | secret-key | |
-| launchdarkly-access-token | Uncovered a Launchdarkly Access Token, potentially compromising feature flag management and application functionality. | access-token | |
-| linear-api-key | Detected a Linear API Token, posing a risk to project management tools and sensitive task data. | api-token,api-key | |
-| linear-client-secret | Identified a Linear Client Secret, which may compromise secure integrations and sensitive project management data. | client-secret | |
-| linkedin-client-id | Found a LinkedIn Client ID, risking unauthorized access to LinkedIn integrations and professional data exposure. | client-id | |
-| linkedin-client-secret | Discovered a LinkedIn Client secret, potentially compromising LinkedIn application integrations and user data. | client-secret | |
-| lob-api-key | Uncovered a Lob API Key, which could lead to unauthorized access to mailing and address verification services. | api-key | |
-| lob-pub-api-key | Detected a Lob Publishable API Key, posing a risk of exposing mail and print service integrations. | api-key | |
-| mailchimp-api-key | Identified a Mailchimp API key, potentially compromising email marketing campaigns and subscriber data. | api-key | |
-| mailgun-pub-key | Discovered a Mailgun public validation key, which could expose email verification processes and associated data. | public-key | |
-| mailgun-private-api-token | Found a Mailgun private API token, risking unauthorized email service operations and data breaches. | private-key | |
-| mailgun-signing-key | Uncovered a Mailgun webhook signing key, potentially compromising email automation and data integrity. | api-key | |
-| mapbox-api-token | Detected a MapBox API token, posing a risk to geospatial services and sensitive location data exposure. | api-token | |
-| mattermost-access-token | Identified a Mattermost Access Token, which may compromise team communication channels and data privacy. | access-token | |
-| messagebird-api-token | Found a MessageBird API token, risking unauthorized access to communication platforms and message data. | api-token | |
-| messagebird-client-id | Discovered a MessageBird client ID, potentially compromising API integrations and sensitive communication data. | client-id | |
-| netlify-access-token | Detected a Netlify Access Token, potentially compromising web hosting services and site management. | access-token | |
-| new-relic-user-api-key | Discovered a New Relic user API Key, which could lead to compromised application insights and performance monitoring. | api-key | |
-| new-relic-user-api-id | Found a New Relic user API ID, posing a risk to application monitoring services and data integrity. | access-id | |
-| new-relic-browser-api-token | Identified a New Relic ingest browser API token, risking unauthorized access to application performance data and analytics. | api-token | |
-| npm-access-token | Uncovered an npm access token, potentially compromising package management and code repository access. | access-token | |
-| nytimes-access-token | Detected a Nytimes Access Token, risking unauthorized access to New York Times APIs and content services. | access-token | |
-| okta-access-token | Identified an Okta Access Token, which may compromise identity management services and user authentication data. | access-token | |
-| openai-api-key | Found an OpenAI API Key, posing a risk of unauthorized access to AI services and data manipulation. | api-key | |
-| plaid-client-id | Uncovered a Plaid Client ID, which could lead to unauthorized financial service integrations and data breaches. | client-id | |
-| planetscale-password | Discovered a PlanetScale password, which could lead to unauthorized database operations and data breaches. | password | |
-| planetscale-api-token | Identified a PlanetScale API token, potentially compromising database management and operations. | api-token | |
-| planetscale-oauth-token | Found a PlanetScale OAuth token, posing a risk to database access control and sensitive data integrity. | access-token | |
-| postman-api-token | Uncovered a Postman API token, potentially compromising API testing and development workflows. | api-token | |
-| prefect-api-token | Detected a Prefect API token, risking unauthorized access to workflow management and automation services. | api-token | |
-| private-key | Identified a Private Key, which may compromise cryptographic security and sensitive data encryption. | private-key | |
-| pulumi-api-token | Found a Pulumi API token, posing a risk to infrastructure as code services and cloud resource management. | api-token | |
-| pypi-upload-token | Discovered a PyPI upload token, potentially compromising Python package distribution and repository integrity. | upload-token | |
-| rapidapi-access-token | Uncovered a RapidAPI Access Token, which could lead to unauthorized access to various APIs and data services. | access-token | |
-| readme-api-token | Detected a Readme API token, risking unauthorized documentation management and content exposure. | api-token | |
-| rubygems-api-token | Identified a Rubygem API token, potentially compromising Ruby library distribution and package management. | api-token | |
-| sendbird-access-id | Discovered a Sendbird Access ID, which could compromise chat and messaging platform integrations. | access-id | |
-| sendbird-access-token | Uncovered a Sendbird Access Token, potentially risking unauthorized access to communication services and user data. | access-token | |
-| sendgrid-api-token | Detected a SendGrid API token, posing a risk of unauthorized email service operations and data exposure. | api-token | |
-| sendinblue-api-token | Identified a Sendinblue API token, which may compromise email marketing services and subscriber data privacy. | api-token | |
-| sentry-access-token | Found a Sentry Access Token, risking unauthorized access to error tracking services and sensitive application data. | access-token | |
-| shippo-api-token | Discovered a Shippo API token, potentially compromising shipping services and customer order data. | api-token | |
-| shopify-access-token | Uncovered a Shopify access token, which could lead to unauthorized e-commerce platform access and data breaches. | access-token | |
-| shopify-custom-access-token | Detected a Shopify custom access token, potentially compromising custom app integrations and e-commerce data security. | access-token | |
-| shopify-private-app-access-token | Identified a Shopify private app access token, risking unauthorized access to private app data and store operations. | access-token | |
-| shopify-shared-secret | Found a Shopify shared secret, posing a risk to application authentication and e-commerce platform security. | public-secret | |
-| sidekiq-secret | Discovered a Sidekiq Secret, which could lead to compromised background job processing and application data breaches. | secret-key | |
-| sidekiq-sensitive-url | Uncovered a Sidekiq Sensitive URL, potentially exposing internal job queues and sensitive operation details. | sensitive-url | |
-| slack-bot-token | Identified a Slack Bot token, which may compromise bot integrations and communication channel security. | access-token | |
-| slack-app-token | Detected a Slack App-level token, risking unauthorized access to Slack applications and workspace data. | access-token | |
-| slack-legacy-token | Detected a Slack Legacy token, risking unauthorized access to older Slack integrations and user data. | access-token | |
-| slack-user-token | Found a Slack User token, posing a risk of unauthorized user impersonation and data access within Slack workspaces. | access-token | |
-| slack-config-access-token | Found a Slack Configuration access token, posing a risk to workspace configuration and sensitive data access. | access-token | |
-| slack-config-refresh-token | Discovered a Slack Configuration refresh token, potentially allowing prolonged unauthorized access to configuration settings. | refresh-token | |
-| slack-legacy-bot-token | Uncovered a Slack Legacy bot token, which could lead to compromised legacy bot operations and data exposure. | access-token | |
-| slack-legacy-workspace-token | Identified a Slack Legacy Workspace token, potentially compromising access to workspace data and legacy features. | access-token | |
-| slack-webhook-url | Discovered a Slack Webhook, which could lead to unauthorized message posting and data leakage in Slack channels. | webhook | |
-| stripe-access-token | Found a Stripe Access Token, posing a risk to payment processing services and sensitive financial data. | access-token | |
-| square-access-token | Detected a Square Access Token, risking unauthorized payment processing and financial transaction exposure. | access-token | |
-| squarespace-access-token | Identified a Squarespace Access Token, which may compromise website management and content control on Squarespace. | access-token | |
-| sumologic-access-token | Uncovered a SumoLogic Access Token, which could lead to unauthorized access to log data and analytics insights. | access-token | |
-| snyk-api-token | Uncovered a Snyk API token, potentially compromising software vulnerability scanning and code security. | api-key | |
-| microsoft-teams-webhook | Uncovered a Microsoft Teams Webhook, which could lead to unauthorized access to team collaboration tools and data leaks. | webhook | |
-| telegram-bot-api-token | Detected a Telegram Bot API Token, risking unauthorized bot operations and message interception on Telegram. | api-token | |
-| travisci-access-token | Identified a Travis CI Access Token, potentially compromising continuous integration services and codebase security. | access-token | |
-| twilio-api-key | Found a Twilio API Key, posing a risk to communication services and sensitive customer interaction data. | api-key | |
-| twitch-api-token | Discovered a Twitch API token, which could compromise streaming services and account integrations. | api-token | |
-| twitter-api-key | Identified a Twitter API Key, which may compromise Twitter application integrations and user data security. | api-key | |
-| twitter-api-secret | Found a Twitter API Secret, risking the security of Twitter app integrations and sensitive data access. | api-key | |
-| twitter-access-token | Detected a Twitter Access Token, posing a risk of unauthorized account operations and social media data exposure. | access-token | |
-| twitter-access-secret | Uncovered a Twitter Access Secret, potentially risking unauthorized Twitter integrations and data breaches. | public-secret | |
-| twitter-bearer-token | Discovered a Twitter Bearer Token, potentially compromising API access and data retrieval from Twitter. | api-token | |
-| typeform-api-token | Uncovered a Typeform API token, which could lead to unauthorized survey management and data collection. | api-token | |
-| vault-batch-token | Detected a Vault Batch Token, risking unauthorized access to secret management services and sensitive data. | api-token | |
-| vault-service-token | Identified a Vault Service Token, potentially compromising infrastructure security and access to sensitive credentials. | api-token | |
-| yandex-api-key | Discovered a Yandex API Key, which could lead to unauthorized access to Yandex services and data manipulation. | api-key | |
-| yandex-aws-access-token | Uncovered a Yandex AWS Access Token, potentially compromising cloud resource access and data security on Yandex Cloud. | access-token | |
-| yandex-access-token | Found a Yandex Access Token, posing a risk to Yandex service integrations and user data privacy. | access-token | |
-| zendesk-secret-key | Detected a Zendesk Secret Key, risking unauthorized access to customer support services and sensitive ticketing data. | secret-key | |
-| authenticated-url | Identify username:password inside URLS | sensitive-url | |
-
+# Rules
+
+Here is a complete list of all the rules that are currently implemented.
+
+
+| Name | Description | Tags | Validity Check |
+| ---- | ---- | ---- | ---- |
+| adafruit-api-key | Identified a potential Adafruit API Key, which could lead to unauthorized access to Adafruit services and sensitive data exposure. | api-key | |
+| adobe-client-id | Detected a pattern that resembles an Adobe OAuth Web Client ID, posing a risk of compromised Adobe integrations and data breaches. | client-id | |
+| adobe-client-secret | Discovered a potential Adobe Client Secret, which, if exposed, could allow unauthorized Adobe service access and data manipulation. | client-secret | |
+| age secret key | Discovered a potential Age encryption tool secret key, risking data decryption and unauthorized access to sensitive information. | secret-key | |
+| airtable-api-key | Uncovered a possible Airtable API Key, potentially compromising database access and leading to data leakage or alteration. | api-key | |
+| algolia-api-key | Identified an Algolia API Key, which could result in unauthorized search operations and data exposure on Algolia-managed platforms. | api-key | |
+| alibaba-access-key-id | Detected an Alibaba Cloud AccessKey ID, posing a risk of unauthorized cloud resource access and potential data compromise. | access-key,access-id | V |
+| alibaba-secret-key | Discovered a potential Alibaba Cloud Secret Key, potentially allowing unauthorized operations and data access within Alibaba Cloud. | secret-key | V |
+| asana-client-id | Discovered a potential Asana Client ID, risking unauthorized access to Asana projects and sensitive task information. | client-id | |
+| asana-client-secret | Identified an Asana Client Secret, which could lead to compromised project management integrity and unauthorized access. | client-secret | |
+| atlassian-api-token | Detected an Atlassian API token, posing a threat to project management and collaboration tool security and data confidentiality. | api-token | |
+| authress-service-client-access-key | Uncovered a possible Authress Service Client Access Key, which may compromise access control services and sensitive data. | access-token | |
+| aws-access-token | Identified a pattern that may indicate AWS credentials, risking unauthorized cloud resource access and data breaches on AWS platforms. | access-token | |
+| bitbucket-client-id | Discovered a potential Bitbucket Client ID, risking unauthorized repository access and potential codebase exposure. | client-id | |
+| bitbucket-client-secret | Discovered a potential Bitbucket Client Secret, posing a risk of compromised code repositories and unauthorized access. | client-secret | |
+| bittrex-access-key | Identified a Bittrex Access Key, which could lead to unauthorized access to cryptocurrency trading accounts and financial loss. | access-key | |
+| bittrex-secret-key | Detected a Bittrex Secret Key, potentially compromising cryptocurrency transactions and financial security. | secret-key | |
+| beamer-api-token | Detected a Beamer API token, potentially compromising content management and exposing sensitive notifications and updates. | api-token | |
+| codecov-access-token | Found a pattern resembling a Codecov Access Token, posing a risk of unauthorized access to code coverage reports and sensitive data. | access-token | |
+| coinbase-access-token | Detected a Coinbase Access Token, posing a risk of unauthorized access to cryptocurrency accounts and financial transactions. | access-token | |
+| clojars-api-token | Uncovered a possible Clojars API token, risking unauthorized access to Clojure libraries and potential code manipulation. | api-token | |
+| confluent-access-token | Identified a Confluent Access Token, which could compromise access to streaming data platforms and sensitive data flow. | access-token | |
+| confluent-secret-key | Found a Confluent Secret Key, potentially risking unauthorized operations and data access within Confluent services. | secret-key | |
+| contentful-delivery-api-token | Discovered a Contentful delivery API token, posing a risk to content management systems and data integrity. | api-token | |
+| databricks-api-token | Uncovered a Databricks API token, which may compromise big data analytics platforms and sensitive data processing. | api-token | |
+| datadog-access-token | Detected a Datadog Access Token, potentially risking monitoring and analytics data exposure and manipulation. | access-token,client-id | |
+| defined-networking-api-token | Identified a Defined Networking API token, which could lead to unauthorized network operations and data breaches. | api-token | |
+| digitalocean-pat | Discovered a DigitalOcean Personal Access Token, posing a threat to cloud infrastructure security and data privacy. | access-token | |
+| digitalocean-access-token | Found a DigitalOcean OAuth Access Token, risking unauthorized cloud resource access and data compromise. | access-token | |
+| digitalocean-refresh-token | Uncovered a DigitalOcean OAuth Refresh Token, which could allow prolonged unauthorized access and resource manipulation. | refresh-token | |
+| discord-api-token | Detected a Discord API key, potentially compromising communication channels and user data privacy on Discord. | api-key,api-token | |
+| discord-client-id | Identified a Discord client ID, which may lead to unauthorized integrations and data exposure in Discord applications. | client-id | |
+| discord-client-secret | Discovered a potential Discord client secret, risking compromised Discord bot integrations and data leaks. | client-secret | |
+| doppler-api-token | Discovered a Doppler API token, posing a risk to environment and secrets management security. | api-token | |
+| dropbox-api-token | Identified a Dropbox API secret, which could lead to unauthorized file access and data breaches in Dropbox storage. | api-token | |
+| dropbox-short-lived-api-token | Discovered a Dropbox short-lived API token, posing a risk of temporary but potentially harmful data access and manipulation. | api-token | |
+| dropbox-long-lived-api-token | Found a Dropbox long-lived API token, risking prolonged unauthorized access to cloud storage and sensitive data. | api-token | |
+| droneci-access-token | Detected a Droneci Access Token, potentially compromising continuous integration and deployment workflows. | access-token | |
+| duffel-api-token | Uncovered a Duffel API token, which may compromise travel platform integrations and sensitive customer data. | api-token | |
+| dynatrace-api-token | Detected a Dynatrace API token, potentially risking application performance monitoring and data exposure. | api-token | |
+| easypost-api-token | Identified an EasyPost API token, which could lead to unauthorized postal and shipment service access and data exposure. | api-token | |
+| easypost-test-api-token | Detected an EasyPost test API token, risking exposure of test environments and potentially sensitive shipment data. | api-token | |
+| etsy-access-token | Found an Etsy Access Token, potentially compromising Etsy shop management and customer data. | access-token | |
+| facebook | Discovered a Facebook Access Token, posing a risk of unauthorized access to Facebook accounts and personal data exposure. | api-token | |
+| fastly-api-token | Uncovered a Fastly API key, which may compromise CDN and edge cloud services, leading to content delivery and security issues. | api-token,api-key | |
+| finicity-client-secret | Identified a Finicity Client Secret, which could lead to compromised financial service integrations and data breaches. | client-secret | |
+| finicity-api-token | Detected a Finicity API token, potentially risking financial data access and unauthorized financial operations. | api-token | |
+| flickr-access-token | Discovered a Flickr Access Token, posing a risk of unauthorized photo management and potential data leakage. | access-token | |
+| finnhub-access-token | Found a Finnhub Access Token, risking unauthorized access to financial market data and analytics. | access-token | |
+| flutterwave-public-key | Detected a Flutterwave Public Key, potentially exposing public cryptographic operations and integrations. | public-key | |
+| flutterwave-secret-key | Identified a Flutterwave Secret Key, risking unauthorized financial transactions and data breaches. | secret-key | |
+| flutterwave-encryption-key | Uncovered a Flutterwave Encryption Key, which may compromise payment processing and sensitive financial information. | encryption-key | |
+| frameio-api-token | Found a Frame.io API token, potentially compromising video collaboration and project management. | api-token | |
+| freshbooks-access-token | Discovered a Freshbooks Access Token, posing a risk to accounting software access and sensitive financial data exposure. | access-token | |
+| gcp-api-key | Uncovered a GCP API key, which could lead to unauthorized access to Google Cloud services and data breaches. | api-key | V |
+| generic-api-key | Detected a Generic API Key, potentially exposing access to various services and sensitive operations. | api-key | |
+| github-pat | Uncovered a GitHub Personal Access Token, potentially leading to unauthorized repository access and sensitive content exposure. | access-token | V |
+| github-fine-grained-pat | Found a GitHub Fine-Grained Personal Access Token, risking unauthorized repository access and code manipulation. | access-token | V |
+| github-oauth | Discovered a GitHub OAuth Access Token, posing a risk of compromised GitHub account integrations and data leaks. | access-token | |
+| github-app-token | Identified a GitHub App Token, which may compromise GitHub application integrations and source code security. | access-token | |
+| github-refresh-token | Detected a GitHub Refresh Token, which could allow prolonged unauthorized access to GitHub services. | refresh-token | |
+| gitlab-pat | Identified a GitLab Personal Access Token, risking unauthorized access to GitLab repositories and codebase exposure. | access-token | V |
+| gitlab-ptt | Found a GitLab Pipeline Trigger Token, potentially compromising continuous integration workflows and project security. | trigger-token | |
+| gitlab-rrt | Discovered a GitLab Runner Registration Token, posing a risk to CI/CD pipeline integrity and unauthorized access. | registration-token | |
+| gitter-access-token | Uncovered a Gitter Access Token, which may lead to unauthorized access to chat and communication services. | access-token | |
+| gocardless-api-token | Detected a GoCardless API token, potentially risking unauthorized direct debit payment operations and financial data exposure. | api-token | |
+| grafana-api-key | Identified a Grafana API key, which could compromise monitoring dashboards and sensitive data analytics. | api-key | |
+| grafana-cloud-api-token | Found a Grafana cloud API token, risking unauthorized access to cloud-based monitoring services and data exposure. | api-token | |
+| grafana-service-account-token | Discovered a Grafana service account token, posing a risk of compromised monitoring services and data integrity. | access-token | |
+| hashicorp-tf-api-token | Uncovered a HashiCorp Terraform user/org API token, which may lead to unauthorized infrastructure management and security breaches. | api-token | |
+| hashicorp-tf-password | Identified a HashiCorp Terraform password field, risking unauthorized infrastructure configuration and security breaches. | password | |
+| heroku-api-key | Detected a Heroku API Key, potentially compromising cloud application deployments and operational security. | api-key | |
+| hubspot-api-key | Found a HubSpot API Token, posing a risk to CRM data integrity and unauthorized marketing operations. | api-token,api-key | |
+| huggingface-access-token | Discovered a Hugging Face Access token, which could lead to unauthorized access to AI models and sensitive data. | access-token | |
+| huggingface-organization-api-token | Uncovered a Hugging Face Organization API token, potentially compromising AI organization accounts and associated data. | api-token | |
+| infracost-api-token | Detected an Infracost API Token, risking unauthorized access to cloud cost estimation tools and financial data. | api-token | |
+| intercom-api-key | Identified an Intercom API Token, which could compromise customer communication channels and data privacy. | api-token,api-key | |
+| jfrog-api-key | Found a JFrog API Key, posing a risk of unauthorized access to software artifact repositories and build pipelines. | api-key | |
+| jfrog-identity-token | Discovered a JFrog Identity Token, potentially compromising access to JFrog services and sensitive software artifacts. | access-token | |
+| jwt | Uncovered a JSON Web Token, which may lead to unauthorized access to web applications and sensitive user data. | access-token | |
+| jwt-base64 | Detected a Base64-encoded JSON Web Token, posing a risk of exposing encoded authentication and data exchange information. | access-token | |
+| kraken-access-token | Identified a Kraken Access Token, potentially compromising cryptocurrency trading accounts and financial security. | access-token | |
+| kucoin-access-token | Found a Kucoin Access Token, risking unauthorized access to cryptocurrency exchange services and transactions. | access-token | |
+| kucoin-secret-key | Discovered a Kucoin Secret Key, which could lead to compromised cryptocurrency operations and financial data breaches. | secret-key | |
+| launchdarkly-access-token | Uncovered a Launchdarkly Access Token, potentially compromising feature flag management and application functionality. | access-token | |
+| linear-api-key | Detected a Linear API Token, posing a risk to project management tools and sensitive task data. | api-token,api-key | |
+| linear-client-secret | Identified a Linear Client Secret, which may compromise secure integrations and sensitive project management data. | client-secret | |
+| linkedin-client-id | Found a LinkedIn Client ID, risking unauthorized access to LinkedIn integrations and professional data exposure. | client-id | |
+| linkedin-client-secret | Discovered a LinkedIn Client secret, potentially compromising LinkedIn application integrations and user data. | client-secret | |
+| lob-api-key | Uncovered a Lob API Key, which could lead to unauthorized access to mailing and address verification services. | api-key | |
+| lob-pub-api-key | Detected a Lob Publishable API Key, posing a risk of exposing mail and print service integrations. | api-key | |
+| mailchimp-api-key | Identified a Mailchimp API key, potentially compromising email marketing campaigns and subscriber data. | api-key | |
+| mailgun-pub-key | Discovered a Mailgun public validation key, which could expose email verification processes and associated data. | public-key | |
+| mailgun-private-api-token | Found a Mailgun private API token, risking unauthorized email service operations and data breaches. | private-key | |
+| mailgun-signing-key | Uncovered a Mailgun webhook signing key, potentially compromising email automation and data integrity. | api-key | |
+| mapbox-api-token | Detected a MapBox API token, posing a risk to geospatial services and sensitive location data exposure. | api-token | |
+| mattermost-access-token | Identified a Mattermost Access Token, which may compromise team communication channels and data privacy. | access-token | |
+| messagebird-api-token | Found a MessageBird API token, risking unauthorized access to communication platforms and message data. | api-token | |
+| messagebird-client-id | Discovered a MessageBird client ID, potentially compromising API integrations and sensitive communication data. | client-id | |
+| netlify-access-token | Detected a Netlify Access Token, potentially compromising web hosting services and site management. | access-token | |
+| new-relic-user-api-key | Discovered a New Relic user API Key, which could lead to compromised application insights and performance monitoring. | api-key | |
+| new-relic-user-api-id | Found a New Relic user API ID, posing a risk to application monitoring services and data integrity. | access-id | |
+| new-relic-browser-api-token | Identified a New Relic ingest browser API token, risking unauthorized access to application performance data and analytics. | api-token | |
+| npm-access-token | Uncovered an npm access token, potentially compromising package management and code repository access. | access-token | |
+| nytimes-access-token | Detected a Nytimes Access Token, risking unauthorized access to New York Times APIs and content services. | access-token | |
+| okta-access-token | Identified an Okta Access Token, which may compromise identity management services and user authentication data. | access-token | |
+| openai-api-key | Found an OpenAI API Key, posing a risk of unauthorized access to AI services and data manipulation. | api-key | |
+| plaid-client-id | Uncovered a Plaid Client ID, which could lead to unauthorized financial service integrations and data breaches. | client-id | |
+| planetscale-password | Discovered a PlanetScale password, which could lead to unauthorized database operations and data breaches. | password | |
+| planetscale-api-token | Identified a PlanetScale API token, potentially compromising database management and operations. | api-token | |
+| planetscale-oauth-token | Found a PlanetScale OAuth token, posing a risk to database access control and sensitive data integrity. | access-token | |
+| postman-api-token | Uncovered a Postman API token, potentially compromising API testing and development workflows. | api-token | |
+| prefect-api-token | Detected a Prefect API token, risking unauthorized access to workflow management and automation services. | api-token | |
+| private-key | Identified a Private Key, which may compromise cryptographic security and sensitive data encryption. | private-key | |
+| pulumi-api-token | Found a Pulumi API token, posing a risk to infrastructure as code services and cloud resource management. | api-token | |
+| pypi-upload-token | Discovered a PyPI upload token, potentially compromising Python package distribution and repository integrity. | upload-token | |
+| rapidapi-access-token | Uncovered a RapidAPI Access Token, which could lead to unauthorized access to various APIs and data services. | access-token | |
+| readme-api-token | Detected a Readme API token, risking unauthorized documentation management and content exposure. | api-token | |
+| rubygems-api-token | Identified a Rubygem API token, potentially compromising Ruby library distribution and package management. | api-token | |
+| sendbird-access-id | Discovered a Sendbird Access ID, which could compromise chat and messaging platform integrations. | access-id | |
+| sendbird-access-token | Uncovered a Sendbird Access Token, potentially risking unauthorized access to communication services and user data. | access-token | |
+| sendgrid-api-token | Detected a SendGrid API token, posing a risk of unauthorized email service operations and data exposure. | api-token | |
+| sendinblue-api-token | Identified a Sendinblue API token, which may compromise email marketing services and subscriber data privacy. | api-token | |
+| sentry-access-token | Found a Sentry Access Token, risking unauthorized access to error tracking services and sensitive application data. | access-token | |
+| shippo-api-token | Discovered a Shippo API token, potentially compromising shipping services and customer order data. | api-token | |
+| shopify-access-token | Uncovered a Shopify access token, which could lead to unauthorized e-commerce platform access and data breaches. | access-token | |
+| shopify-custom-access-token | Detected a Shopify custom access token, potentially compromising custom app integrations and e-commerce data security. | access-token | |
+| shopify-private-app-access-token | Identified a Shopify private app access token, risking unauthorized access to private app data and store operations. | access-token | |
+| shopify-shared-secret | Found a Shopify shared secret, posing a risk to application authentication and e-commerce platform security. | public-secret | |
+| sidekiq-secret | Discovered a Sidekiq Secret, which could lead to compromised background job processing and application data breaches. | secret-key | |
+| sidekiq-sensitive-url | Uncovered a Sidekiq Sensitive URL, potentially exposing internal job queues and sensitive operation details. | sensitive-url | |
+| slack-bot-token | Identified a Slack Bot token, which may compromise bot integrations and communication channel security. | access-token | |
+| slack-app-token | Detected a Slack App-level token, risking unauthorized access to Slack applications and workspace data. | access-token | |
+| slack-legacy-token | Detected a Slack Legacy token, risking unauthorized access to older Slack integrations and user data. | access-token | |
+| slack-user-token | Found a Slack User token, posing a risk of unauthorized user impersonation and data access within Slack workspaces. | access-token | |
+| slack-config-access-token | Found a Slack Configuration access token, posing a risk to workspace configuration and sensitive data access. | access-token | |
+| slack-config-refresh-token | Discovered a Slack Configuration refresh token, potentially allowing prolonged unauthorized access to configuration settings. | refresh-token | |
+| slack-legacy-bot-token | Uncovered a Slack Legacy bot token, which could lead to compromised legacy bot operations and data exposure. | access-token | |
+| slack-legacy-workspace-token | Identified a Slack Legacy Workspace token, potentially compromising access to workspace data and legacy features. | access-token | |
+| slack-webhook-url | Discovered a Slack Webhook, which could lead to unauthorized message posting and data leakage in Slack channels. | webhook | |
+| stripe-access-token | Found a Stripe Access Token, posing a risk to payment processing services and sensitive financial data. | access-token | |
+| square-access-token | Detected a Square Access Token, risking unauthorized payment processing and financial transaction exposure. | access-token | |
+| squarespace-access-token | Identified a Squarespace Access Token, which may compromise website management and content control on Squarespace. | access-token | |
+| sumologic-access-token | Uncovered a SumoLogic Access Token, which could lead to unauthorized access to log data and analytics insights. | access-token | |
+| snyk-api-token | Uncovered a Snyk API token, potentially compromising software vulnerability scanning and code security. | api-key | |
+| microsoft-teams-webhook | Uncovered a Microsoft Teams Webhook, which could lead to unauthorized access to team collaboration tools and data leaks. | webhook | |
+| telegram-bot-api-token | Detected a Telegram Bot API Token, risking unauthorized bot operations and message interception on Telegram. | api-token | |
+| travisci-access-token | Identified a Travis CI Access Token, potentially compromising continuous integration services and codebase security. | access-token | |
+| twilio-api-key | Found a Twilio API Key, posing a risk to communication services and sensitive customer interaction data. | api-key | |
+| twitch-api-token | Discovered a Twitch API token, which could compromise streaming services and account integrations. | api-token | |
+| twitter-api-key | Identified a Twitter API Key, which may compromise Twitter application integrations and user data security. | api-key | |
+| twitter-api-secret | Found a Twitter API Secret, risking the security of Twitter app integrations and sensitive data access. | api-key | |
+| twitter-access-token | Detected a Twitter Access Token, posing a risk of unauthorized account operations and social media data exposure. | access-token | |
+| twitter-access-secret | Uncovered a Twitter Access Secret, potentially risking unauthorized Twitter integrations and data breaches. | public-secret | |
+| twitter-bearer-token | Discovered a Twitter Bearer Token, potentially compromising API access and data retrieval from Twitter. | api-token | |
+| typeform-api-token | Uncovered a Typeform API token, which could lead to unauthorized survey management and data collection. | api-token | |
+| vault-batch-token | Detected a Vault Batch Token, risking unauthorized access to secret management services and sensitive data. | api-token | |
+| vault-service-token | Identified a Vault Service Token, potentially compromising infrastructure security and access to sensitive credentials. | api-token | |
+| yandex-api-key | Discovered a Yandex API Key, which could lead to unauthorized access to Yandex services and data manipulation. | api-key | |
+| yandex-aws-access-token | Uncovered a Yandex AWS Access Token, potentially compromising cloud resource access and data security on Yandex Cloud. | access-token | |
+| yandex-access-token | Found a Yandex Access Token, posing a risk to Yandex service integrations and user data privacy. | access-token | |
+| zendesk-secret-key | Detected a Zendesk Secret Key, risking unauthorized access to customer support services and sensitive ticketing data. | secret-key | |
+| authenticated-url | Identify username:password inside URLS | sensitive-url | |
+
diff --git a/engine/config.go b/engine/config.go
index fa342c4b..4866edc4 100644
--- a/engine/config.go
+++ b/engine/config.go
@@ -1,25 +1,25 @@
-package engine
-
-import (
- "regexp"
-
- "github.com/zricethezav/gitleaks/v8/config"
-)
-
-// Taken from gitleaks config https://github.com/gitleaks/gitleaks/blob/6c52f878cc48a513849900a9aa6f9d68e1c2dbdd/config/gitleaks.toml#L15-L26
-var cfg = config.Config{
- Allowlist: config.Allowlist{
- Paths: []*regexp.Regexp{
- regexp.MustCompile(`gitleaks.toml`),
- regexp.MustCompile(`(.*?)(jpg|gif|doc|docx|zip|xls|pdf|bin|svg|socket|vsidx|v2|suo|wsuo|.dll|pdb|exe)$`),
- regexp.MustCompile(`(go.mod|go.sum)$`),
- regexp.MustCompile(`gradle.lockfile`),
- regexp.MustCompile(`node_modules`),
- regexp.MustCompile(`package-lock.json`),
- regexp.MustCompile(`yarn.lock`),
- regexp.MustCompile(`pnpm-lock.yaml`),
- regexp.MustCompile(`Database.refactorlog`),
- regexp.MustCompile(`vendor`),
- },
- },
-}
+package engine
+
+import (
+ "regexp"
+
+ "github.com/zricethezav/gitleaks/v8/config"
+)
+
+// Taken from gitleaks config https://github.com/gitleaks/gitleaks/blob/6c52f878cc48a513849900a9aa6f9d68e1c2dbdd/config/gitleaks.toml#L15-L26
+var cfg = config.Config{
+ Allowlist: config.Allowlist{
+ Paths: []*regexp.Regexp{
+ regexp.MustCompile(`gitleaks.toml`),
+ regexp.MustCompile(`(.*?)(jpg|gif|doc|docx|zip|xls|pdf|bin|svg|socket|vsidx|v2|suo|wsuo|.dll|pdb|exe)$`),
+ regexp.MustCompile(`(go.mod|go.sum)$`),
+ regexp.MustCompile(`gradle.lockfile`),
+ regexp.MustCompile(`node_modules`),
+ regexp.MustCompile(`package-lock.json`),
+ regexp.MustCompile(`yarn.lock`),
+ regexp.MustCompile(`pnpm-lock.yaml`),
+ regexp.MustCompile(`Database.refactorlog`),
+ regexp.MustCompile(`vendor`),
+ },
+ },
+}
diff --git a/engine/engine.go b/engine/engine.go
index 03a69353..46d13b69 100644
--- a/engine/engine.go
+++ b/engine/engine.go
@@ -3,14 +3,14 @@ package engine
import (
"crypto/sha1"
"fmt"
- "github.com/checkmarx/2ms/engine/linecontent"
- "github.com/checkmarx/2ms/engine/score"
"os"
"regexp"
"strings"
- "sync"
"text/tabwriter"
+ "github.com/checkmarx/2ms/engine/linecontent"
+ "github.com/checkmarx/2ms/engine/score"
+
"github.com/checkmarx/2ms/engine/rules"
"github.com/checkmarx/2ms/engine/validation"
"github.com/checkmarx/2ms/lib/secrets"
@@ -78,9 +78,7 @@ func Init(engineConfig EngineConfig) (*Engine, error) {
}, nil
}
-func (e *Engine) Detect(item plugins.ISourceItem, secretsChannel chan *secrets.Secret, wg *sync.WaitGroup, pluginName string, errors chan error) {
- defer wg.Done()
-
+func (e *Engine) Detect(item plugins.ISourceItem, secretsChannel chan *secrets.Secret, pluginName string, errors chan error) {
fragment := detect.Fragment{
Raw: *item.GetContent(),
FilePath: item.GetSource(),
@@ -137,13 +135,11 @@ func (e *Engine) AddRegexRules(patterns []string) error {
return nil
}
-func (s *Engine) RegisterForValidation(secret *secrets.Secret, wg *sync.WaitGroup) {
- defer wg.Done()
+func (s *Engine) RegisterForValidation(secret *secrets.Secret) {
s.validator.RegisterForValidation(secret)
}
-func (s *Engine) Score(secret *secrets.Secret, validateFlag bool, wg *sync.WaitGroup) {
- defer wg.Done()
+func (s *Engine) Score(secret *secrets.Secret, validateFlag bool) {
validationStatus := secrets.UnknownResult // default validity
if validateFlag {
validationStatus = secret.ValidationStatus
diff --git a/engine/engine_test.go b/engine/engine_test.go
index d31fa39e..3db7490b 100644
--- a/engine/engine_test.go
+++ b/engine/engine_test.go
@@ -2,10 +2,10 @@ package engine
import (
"fmt"
- "github.com/stretchr/testify/assert"
- "sync"
"testing"
+ "github.com/stretchr/testify/assert"
+
"github.com/checkmarx/2ms/engine/rules"
"github.com/checkmarx/2ms/lib/secrets"
"github.com/checkmarx/2ms/plugins"
@@ -79,9 +79,7 @@ func TestDetector(t *testing.T) {
secretsChan := make(chan *secrets.Secret, 1)
errorsChan := make(chan error, 1)
- wg := &sync.WaitGroup{}
- wg.Add(1)
- detector.Detect(i, secretsChan, wg, fsPlugin.GetName(), errorsChan)
+ detector.Detect(i, secretsChan, fsPlugin.GetName(), errorsChan)
close(secretsChan)
s := <-secretsChan
@@ -155,9 +153,7 @@ func TestSecrets(t *testing.T) {
fmt.Printf("Start test %s", name)
secretsChan := make(chan *secrets.Secret, 1)
errorsChan := make(chan error, 1)
- wg := &sync.WaitGroup{}
- wg.Add(1)
- detector.Detect(item{content: &secret.Content}, secretsChan, wg, fsPlugin.GetName(), errorsChan)
+ detector.Detect(item{content: &secret.Content}, secretsChan, fsPlugin.GetName(), errorsChan)
close(secretsChan)
close(errorsChan)
diff --git a/engine/extra/extra.go b/engine/extra/extra.go
index 638e4855..150ef02a 100644
--- a/engine/extra/extra.go
+++ b/engine/extra/extra.go
@@ -1,61 +1,59 @@
-package extra
-
-import (
- "encoding/base64"
- "encoding/json"
- "fmt"
- "strings"
- "sync"
-
- "github.com/checkmarx/2ms/lib/secrets"
-)
-
-type addExtraFunc = func(*secrets.Secret) interface{}
-
-var ruleIDToFunction = map[string]addExtraFunc{
- "jwt": addExtraJWT,
-}
-
-func AddExtraToSecret(secret *secrets.Secret, wg *sync.WaitGroup) {
- defer wg.Done()
- if addExtra, ok := ruleIDToFunction[secret.RuleID]; ok {
- extraData := addExtra(secret)
- if extraData != nil && extraData != "" {
- UpdateExtraField(secret, "secretDetails", extraData)
- }
- }
-}
-
-var mtxs = &NamedMutex{}
-
-func UpdateExtraField(secret *secrets.Secret, extraName string, extraData interface{}) {
- mtxs.Lock(secret.ID)
- defer mtxs.Unlock(secret.ID)
-
- if secret.ExtraDetails == nil {
- secret.ExtraDetails = make(map[string]interface{})
- }
- secret.ExtraDetails[extraName] = extraData
-}
-
-func addExtraJWT(secret *secrets.Secret) interface{} {
- tokenString := secret.Value
-
- parts := strings.Split(tokenString, ".")
- if len(parts) != 3 {
- return "Invalid JWT token"
- }
-
- payload, err := base64.RawURLEncoding.DecodeString(parts[1])
- if err != nil {
- return fmt.Sprintf("Failed to decode JWT payload: %s", err)
- }
-
- var claims map[string]interface{}
- err = json.Unmarshal(payload, &claims)
- if err != nil {
- return fmt.Sprintf("Failed to unmarshal JWT payload: %s", string(payload))
- }
-
- return claims
-}
+package extra
+
+import (
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "strings"
+
+ "github.com/checkmarx/2ms/lib/secrets"
+)
+
+type addExtraFunc = func(*secrets.Secret) interface{}
+
+var ruleIDToFunction = map[string]addExtraFunc{
+ "jwt": addExtraJWT,
+}
+
+func AddExtraToSecret(secret *secrets.Secret) {
+ if addExtra, ok := ruleIDToFunction[secret.RuleID]; ok {
+ extraData := addExtra(secret)
+ if extraData != nil && extraData != "" {
+ UpdateExtraField(secret, "secretDetails", extraData)
+ }
+ }
+}
+
+var mtxs = &NamedMutex{}
+
+func UpdateExtraField(secret *secrets.Secret, extraName string, extraData interface{}) {
+ mtxs.Lock(secret.ID)
+ defer mtxs.Unlock(secret.ID)
+
+ if secret.ExtraDetails == nil {
+ secret.ExtraDetails = make(map[string]interface{})
+ }
+ secret.ExtraDetails[extraName] = extraData
+}
+
+func addExtraJWT(secret *secrets.Secret) interface{} {
+ tokenString := secret.Value
+
+ parts := strings.Split(tokenString, ".")
+ if len(parts) != 3 {
+ return "Invalid JWT token"
+ }
+
+ payload, err := base64.RawURLEncoding.DecodeString(parts[1])
+ if err != nil {
+ return fmt.Sprintf("Failed to decode JWT payload: %s", err)
+ }
+
+ var claims map[string]interface{}
+ err = json.Unmarshal(payload, &claims)
+ if err != nil {
+ return fmt.Sprintf("Failed to unmarshal JWT payload: %s", string(payload))
+ }
+
+ return claims
+}
diff --git a/engine/extra/extra_test.go b/engine/extra/extra_test.go
index c4fce11e..2a8482db 100644
--- a/engine/extra/extra_test.go
+++ b/engine/extra/extra_test.go
@@ -1,61 +1,58 @@
-package extra
-
-import (
- "encoding/base64"
- "fmt"
- "github.com/checkmarx/2ms/lib/secrets"
- "github.com/stretchr/testify/assert"
- "sync"
- "testing"
-)
-
-func TestAddExtraToSecret(t *testing.T) {
- tests := []struct {
- name string
- secretValue string
- expectedOutput interface{}
- }{
- {
- name: "Valid JWT",
- secretValue: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6Im1vY2tOYW1lIn0.dummysignature",
- expectedOutput: map[string]interface{}{
- "sub": "1234567890",
- "name": "mockName",
- },
- },
- {
- name: "Invalid JWT format - it should contain exactly three parts separated by '.'",
- secretValue: "invalidJWT.token",
- expectedOutput: "Invalid JWT token",
- },
- {
- name: "Base64 decoding failure",
- secretValue: "header." + base64.RawURLEncoding.EncodeToString([]byte("invalid_payload")) + ".signature",
- expectedOutput: "Failed to unmarshal JWT payload: invalid_payload",
- },
- {
- name: "Malformed base64",
- secretValue: fmt.Sprintf("header.%s.signature",
- base64.RawURLEncoding.EncodeToString([]byte("{malformed_json"))),
- expectedOutput: "Failed to unmarshal JWT payload: {malformed_json",
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- secret := &secrets.Secret{
- ID: "test-secret",
- RuleID: "jwt",
- Value: tt.secretValue,
- ExtraDetails: make(map[string]interface{}),
- }
-
- var wg sync.WaitGroup
- wg.Add(1)
- AddExtraToSecret(secret, &wg)
- wg.Wait()
-
- assert.Equal(t, tt.expectedOutput, secret.ExtraDetails["secretDetails"])
- })
- }
-}
+package extra
+
+import (
+ "encoding/base64"
+ "fmt"
+ "testing"
+
+ "github.com/checkmarx/2ms/lib/secrets"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestAddExtraToSecret(t *testing.T) {
+ tests := []struct {
+ name string
+ secretValue string
+ expectedOutput interface{}
+ }{
+ {
+ name: "Valid JWT",
+ secretValue: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6Im1vY2tOYW1lIn0.dummysignature",
+ expectedOutput: map[string]interface{}{
+ "sub": "1234567890",
+ "name": "mockName",
+ },
+ },
+ {
+ name: "Invalid JWT format - it should contain exactly three parts separated by '.'",
+ secretValue: "invalidJWT.token",
+ expectedOutput: "Invalid JWT token",
+ },
+ {
+ name: "Base64 decoding failure",
+ secretValue: "header." + base64.RawURLEncoding.EncodeToString([]byte("invalid_payload")) + ".signature",
+ expectedOutput: "Failed to unmarshal JWT payload: invalid_payload",
+ },
+ {
+ name: "Malformed base64",
+ secretValue: fmt.Sprintf("header.%s.signature",
+ base64.RawURLEncoding.EncodeToString([]byte("{malformed_json"))),
+ expectedOutput: "Failed to unmarshal JWT payload: {malformed_json",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ secret := &secrets.Secret{
+ ID: "test-secret",
+ RuleID: "jwt",
+ Value: tt.secretValue,
+ ExtraDetails: make(map[string]interface{}),
+ }
+
+ AddExtraToSecret(secret)
+
+ assert.Equal(t, tt.expectedOutput, secret.ExtraDetails["secretDetails"])
+ })
+ }
+}
diff --git a/engine/extra/mutex.go b/engine/extra/mutex.go
index 30e4f669..dc46129e 100644
--- a/engine/extra/mutex.go
+++ b/engine/extra/mutex.go
@@ -1,21 +1,21 @@
-package extra
-
-import (
- "sync"
-)
-
-type NamedMutex struct {
- mutexes sync.Map
-}
-
-func (n *NamedMutex) Lock(key string) {
- mu, _ := n.mutexes.LoadOrStore(key, &sync.Mutex{})
- mu.(*sync.Mutex).Lock()
-}
-
-func (n *NamedMutex) Unlock(key string) {
- mu, ok := n.mutexes.Load(key)
- if ok {
- mu.(*sync.Mutex).Unlock()
- }
-}
+package extra
+
+import (
+ "sync"
+)
+
+type NamedMutex struct {
+ mutexes sync.Map
+}
+
+func (n *NamedMutex) Lock(key string) {
+ mu, _ := n.mutexes.LoadOrStore(key, &sync.Mutex{})
+ mu.(*sync.Mutex).Lock()
+}
+
+func (n *NamedMutex) Unlock(key string) {
+ mu, ok := n.mutexes.Load(key)
+ if ok {
+ mu.(*sync.Mutex).Unlock()
+ }
+}
diff --git a/engine/rules/authenticated_url.go b/engine/rules/authenticated_url.go
index 21c8f30b..6373cac9 100644
--- a/engine/rules/authenticated_url.go
+++ b/engine/rules/authenticated_url.go
@@ -1,40 +1,40 @@
-package rules
-
-import (
- "regexp"
-
- "github.com/zricethezav/gitleaks/v8/config"
-)
-
-func AuthenticatedURL() *config.Rule {
- regex, _ := regexp.Compile(`:\/\/(\w+:\w\S+)@\S+\.\S+`)
- rule := config.Rule{
- Description: "Identify username:password inside URLS",
- RuleID: "authenticated-url",
- Regex: regex,
- Keywords: []string{"://"},
- SecretGroup: 1,
- Allowlist: config.Allowlist{
- StopWords: []string{"password", "pass"},
- },
- }
-
- tPositives := []string{
- "mongodb+srv://radar:mytoken@io.dbb.mongodb.net/?retryWrites=true&w=majority",
- "--output=https://elastic:bF21iC0bfTVXo3qhpJqTGs78@c22f5bc9787c4c268d3b069ad866bdc2.eu-central-1.aws.cloud.es.io:9243/tfs",
- "https://abc:123@google.com",
- }
-
- fPositives := []string{
- "https://google.com",
- "https://google.com?user=abc&password=123",
- `
`,
- `my [Linkedin](https://www.linkedin.com/in/rodriguesjeffdev/) or email: rodriguesjeff.dev@gmail.com`,
- `[](mailto:vaibhav.hariramani01@gmail.com)`,
- `https://situmops:$(github_token)@github.com/$(Build.Repository.Name).git`,
- `'$cmd "unilinks://@@malformed.invalid.url/path?"$cmdSuffix',`,
- `Uri.parse('http://login:password@192.168.0.1:8888'),`,
- }
-
- return validate(rule, tPositives, fPositives)
-}
+package rules
+
+import (
+ "regexp"
+
+ "github.com/zricethezav/gitleaks/v8/config"
+)
+
+func AuthenticatedURL() *config.Rule {
+ regex, _ := regexp.Compile(`:\/\/(\w+:\w\S+)@\S+\.\S+`)
+ rule := config.Rule{
+ Description: "Identify username:password inside URLS",
+ RuleID: "authenticated-url",
+ Regex: regex,
+ Keywords: []string{"://"},
+ SecretGroup: 1,
+ Allowlist: config.Allowlist{
+ StopWords: []string{"password", "pass"},
+ },
+ }
+
+ tPositives := []string{
+ "mongodb+srv://radar:mytoken@io.dbb.mongodb.net/?retryWrites=true&w=majority",
+ "--output=https://elastic:bF21iC0bfTVXo3qhpJqTGs78@c22f5bc9787c4c268d3b069ad866bdc2.eu-central-1.aws.cloud.es.io:9243/tfs",
+ "https://abc:123@google.com",
+ }
+
+ fPositives := []string{
+ "https://google.com",
+ "https://google.com?user=abc&password=123",
+ `
`,
+ `my [Linkedin](https://www.linkedin.com/in/rodriguesjeffdev/) or email: rodriguesjeff.dev@gmail.com`,
+ `[](mailto:vaibhav.hariramani01@gmail.com)`,
+ `https://situmops:$(github_token)@github.com/$(Build.Repository.Name).git`,
+ `'$cmd "unilinks://@@malformed.invalid.url/path?"$cmdSuffix',`,
+ `Uri.parse('http://login:password@192.168.0.1:8888'),`,
+ }
+
+ return validate(rule, tPositives, fPositives)
+}
diff --git a/engine/rules/hardcodedPassword.go b/engine/rules/hardcodedPassword.go
index f9a59fbc..3aa28086 100644
--- a/engine/rules/hardcodedPassword.go
+++ b/engine/rules/hardcodedPassword.go
@@ -1,68 +1,68 @@
-package rules
-
-import (
- "regexp"
-
- "github.com/zricethezav/gitleaks/v8/cmd/generate/config/rules"
- "github.com/zricethezav/gitleaks/v8/config"
-)
-
-func HardcodedPassword() *config.Rule {
- // This regex is the output regex of 'generic-api-key' rule from gitleaks, with the next changes:
- // 1. gitleaks/gitleaks#1267
- // 2. gitleaks/gitleaks#1265
- // 3. Minimum length of 4 characters (was 10)
- regex, _ := regexp.Compile(`(?i)(?:key|api|token|secret|client|passwd|password|auth|access)(?:[0-9a-z\-_\t .]{0,20})(?:\s|'\s|"|\\){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|\"|\\|\s|=|\x60){0,5}([0-9a-z\-_.=!@#\$%\^\&\*]{4,150})(?:['|\"|\\|\n|\r|\s|\x60|;|<]|$)`)
- rule := config.Rule{
- Description: "Hardcoded password",
- RuleID: "hardcoded-password",
- Regex: regex,
- Keywords: []string{
- "key",
- "api",
- "token",
- "secret",
- "client",
- "passwd",
- "password",
- "auth",
- "access",
- },
- Entropy: 0,
- SecretGroup: 1,
- Allowlist: config.Allowlist{
- StopWords: rules.DefaultStopWords,
- },
- }
-
- tPositives := []string{
- `"client_id" : "0afae57f3ccfd9d7f5767067bc48b30f719e271ba470488056e37ab35d4b6506"`,
- `"client_secret" : "6da89121079f83b2eb6acccf8219ea982c3d79bccc3e9c6a85856480661f8fde",`,
- `"password: 'edf8f16608465858a6c9e3cccb97d3c2'"`,
- ``,
- `"client_id" : "edf8f16608465858a6c9e3cccb97d3c2"`,
- "https://google.com?user=abc&password=1234",
- `{ "access-key": "6da89121079f83b2eb6acccf8219ea982c3d79bccc", }`,
- `"{ \"access-key\": \"6da89121079f83b2eb6acccf8219ea982c3d79bccc\", }"`,
- "edf8f16608465858a6c9e3cccb97d3c2",
- "M_DB_PASSWORD= edf8f16608465858a6c9e3cccb97d3c2",
- `"client_secret" : "4v7b9n2k5h",`, // entropy: 3.32
- `"password: 'comp123!'"`,
- "MyComp9876", // entropy: 3.32
- ``,
- "M_DB_PASSWORD= edf8f16608465858a6c9e3cccb97d3c2",
- }
-
- fPositives := []string{
- `client_vpn_endpoint_id = aws_ec2_client_vpn_endpoint.client-vpn-endpoint.id`,
- `password combination.
-
- R5: Regulatory--21`,
- "GITHUB_TOKEN: ${GITHUB_TOKEN}",
- "password = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'", // Stop word
- "password = 'your_password_here'", // Stop word
-
- }
-
- return validate(rule, tPositives, fPositives)
-}
+package rules
+
+import (
+ "regexp"
+
+ "github.com/zricethezav/gitleaks/v8/cmd/generate/config/rules"
+ "github.com/zricethezav/gitleaks/v8/config"
+)
+
+func HardcodedPassword() *config.Rule {
+ // This regex is the output regex of 'generic-api-key' rule from gitleaks, with the next changes:
+ // 1. gitleaks/gitleaks#1267
+ // 2. gitleaks/gitleaks#1265
+ // 3. Minimum length of 4 characters (was 10)
+ regex, _ := regexp.Compile(`(?i)(?:key|api|token|secret|client|passwd|password|auth|access)(?:[0-9a-z\-_\t .]{0,20})(?:\s|'\s|"|\\){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|\"|\\|\s|=|\x60){0,5}([0-9a-z\-_.=!@#\$%\^\&\*]{4,150})(?:['|\"|\\|\n|\r|\s|\x60|;|<]|$)`)
+ rule := config.Rule{
+ Description: "Hardcoded password",
+ RuleID: "hardcoded-password",
+ Regex: regex,
+ Keywords: []string{
+ "key",
+ "api",
+ "token",
+ "secret",
+ "client",
+ "passwd",
+ "password",
+ "auth",
+ "access",
+ },
+ Entropy: 0,
+ SecretGroup: 1,
+ Allowlist: config.Allowlist{
+ StopWords: rules.DefaultStopWords,
+ },
+ }
+
+ tPositives := []string{
+ `"client_id" : "0afae57f3ccfd9d7f5767067bc48b30f719e271ba470488056e37ab35d4b6506"`,
+ `"client_secret" : "6da89121079f83b2eb6acccf8219ea982c3d79bccc3e9c6a85856480661f8fde",`,
+ `"password: 'edf8f16608465858a6c9e3cccb97d3c2'"`,
+ ``,
+ `"client_id" : "edf8f16608465858a6c9e3cccb97d3c2"`,
+ "https://google.com?user=abc&password=1234",
+ `{ "access-key": "6da89121079f83b2eb6acccf8219ea982c3d79bccc", }`,
+ `"{ \"access-key\": \"6da89121079f83b2eb6acccf8219ea982c3d79bccc\", }"`,
+ "edf8f16608465858a6c9e3cccb97d3c2",
+ "M_DB_PASSWORD= edf8f16608465858a6c9e3cccb97d3c2",
+ `"client_secret" : "4v7b9n2k5h",`, // entropy: 3.32
+ `"password: 'comp123!'"`,
+ "MyComp9876", // entropy: 3.32
+ ``,
+ "M_DB_PASSWORD= edf8f16608465858a6c9e3cccb97d3c2",
+ }
+
+ fPositives := []string{
+ `client_vpn_endpoint_id = aws_ec2_client_vpn_endpoint.client-vpn-endpoint.id`,
+ `password combination.
+
+ R5: Regulatory--21`,
+ "GITHUB_TOKEN: ${GITHUB_TOKEN}",
+ "password = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'", // Stop word
+ "password = 'your_password_here'", // Stop word
+
+ }
+
+ return validate(rule, tPositives, fPositives)
+}
diff --git a/engine/rules/plaid.go b/engine/rules/plaid.go
index 4c6ba51e..d36b7659 100644
--- a/engine/rules/plaid.go
+++ b/engine/rules/plaid.go
@@ -1,27 +1,27 @@
-package rules
-
-import (
- "github.com/zricethezav/gitleaks/v8/cmd/generate/secrets"
- "github.com/zricethezav/gitleaks/v8/config"
-)
-
-// Using this local version because gitleaks has entropy as 3.5, which causes issues on this rule's validation
-func PlaidAccessID() *config.Rule {
- // define rule
- r := config.Rule{
- RuleID: "plaid-client-id",
- Description: "Uncovered a Plaid Client ID, which could lead to unauthorized financial service integrations and data breaches.",
- Regex: generateSemiGenericRegex([]string{"plaid"}, alphaNumeric("24"), true),
-
- Entropy: 3.0,
- Keywords: []string{
- "plaid",
- },
- }
-
- // validate
- tps := []string{
- generateSampleSecret("plaid", secrets.NewSecret(alphaNumeric("24"))),
- }
- return validate(r, tps, nil)
-}
+package rules
+
+import (
+ "github.com/zricethezav/gitleaks/v8/cmd/generate/secrets"
+ "github.com/zricethezav/gitleaks/v8/config"
+)
+
+// Using this local version because gitleaks has entropy as 3.5, which causes issues on this rule's validation
+func PlaidAccessID() *config.Rule {
+ // define rule
+ r := config.Rule{
+ RuleID: "plaid-client-id",
+ Description: "Uncovered a Plaid Client ID, which could lead to unauthorized financial service integrations and data breaches.",
+ Regex: generateSemiGenericRegex([]string{"plaid"}, alphaNumeric("24"), true),
+
+ Entropy: 3.0,
+ Keywords: []string{
+ "plaid",
+ },
+ }
+
+ // validate
+ tps := []string{
+ generateSampleSecret("plaid", secrets.NewSecret(alphaNumeric("24"))),
+ }
+ return validate(r, tps, nil)
+}
diff --git a/engine/rules/privateKey.go b/engine/rules/privateKey.go
index d68e2f97..36025eab 100644
--- a/engine/rules/privateKey.go
+++ b/engine/rules/privateKey.go
@@ -1,30 +1,30 @@
-package rules
-
-import (
- "github.com/zricethezav/gitleaks/v8/config"
- "regexp"
-)
-
-func PrivateKey() *config.Rule {
- // define rule
- r := config.Rule{
- Description: "Identified a Private Key, which may compromise cryptographic security and sensitive data encryption.",
- RuleID: "private-key",
- Regex: regexp.MustCompile(`(?i)-----BEGIN[ A-Z0-9_-]{0,100}PRIVATE KEY(?: BLOCK)?-----[\s\S-]*?KEY(?: BLOCK)?-----`),
- Keywords: []string{"-----BEGIN"},
- }
-
- // validate
- tps := []string{`-----BEGIN PRIVATE KEY-----
-anything
------END PRIVATE KEY-----`,
- `-----BEGIN RSA PRIVATE KEY-----
-abcdefghijklmnopqrstuvwxyz
------END RSA PRIVATE KEY-----
-`,
- `-----BEGIN PRIVATE KEY BLOCK-----
-anything
------END PRIVATE KEY BLOCK-----`,
- }
- return validate(r, tps, nil)
-}
+package rules
+
+import (
+ "github.com/zricethezav/gitleaks/v8/config"
+ "regexp"
+)
+
+func PrivateKey() *config.Rule {
+ // define rule
+ r := config.Rule{
+ Description: "Identified a Private Key, which may compromise cryptographic security and sensitive data encryption.",
+ RuleID: "private-key",
+ Regex: regexp.MustCompile(`(?i)-----BEGIN[ A-Z0-9_-]{0,100}PRIVATE KEY(?: BLOCK)?-----[\s\S-]*?KEY(?: BLOCK)?-----`),
+ Keywords: []string{"-----BEGIN"},
+ }
+
+ // validate
+ tps := []string{`-----BEGIN PRIVATE KEY-----
+anything
+-----END PRIVATE KEY-----`,
+ `-----BEGIN RSA PRIVATE KEY-----
+abcdefghijklmnopqrstuvwxyz
+-----END RSA PRIVATE KEY-----
+`,
+ `-----BEGIN PRIVATE KEY BLOCK-----
+anything
+-----END PRIVATE KEY BLOCK-----`,
+ }
+ return validate(r, tps, nil)
+}
diff --git a/engine/rules/rule.go b/engine/rules/rule.go
index c25f441d..a2928df3 100644
--- a/engine/rules/rule.go
+++ b/engine/rules/rule.go
@@ -1,56 +1,56 @@
-package rules
-
-import (
- "strings"
-
- "github.com/rs/zerolog/log"
- "github.com/zricethezav/gitleaks/v8/config"
- "github.com/zricethezav/gitleaks/v8/detect"
-)
-
-type ScoreParameters struct {
- Category RuleCategory
- RuleType uint8
-}
-
-type Rule struct {
- Rule config.Rule
- Tags []string
- ScoreParameters ScoreParameters
-}
-
-// Copied from https://github.com/gitleaks/gitleaks/blob/463d24618fa42fc7629dc30c9744ebe36c5df1ab/cmd/generate/config/rules/rule.go
-func validate(r config.Rule, truePositives []string, falsePositives []string) *config.Rule {
- // normalize keywords like in the config package
- var keywords []string
- for _, k := range r.Keywords {
- keywords = append(keywords, strings.ToLower(k))
- }
- r.Keywords = keywords
-
- rules := make(map[string]config.Rule)
- rules[r.RuleID] = r
- d := detect.NewDetector(config.Config{
- Rules: rules,
- Keywords: keywords,
- })
- for _, tp := range truePositives {
- if len(d.DetectString(tp)) != 1 {
- log.Fatal(). // lint:ignore This Fatal happens in a test
- Str("rule", r.RuleID).
- Str("value", tp).
- Str("regex", r.Regex.String()).
- Msg("Failed to Validate. True positive was not detected by regex.")
- }
- }
- for _, fp := range falsePositives {
- if len(d.DetectString(fp)) != 0 {
- log.Fatal(). // lint:ignore This Fatal happens in a test
- Str("rule", r.RuleID).
- Str("value", fp).
- Str("regex", r.Regex.String()).
- Msg("Failed to Validate. False positive was detected by regex.")
- }
- }
- return &r
-}
+package rules
+
+import (
+ "strings"
+
+ "github.com/rs/zerolog/log"
+ "github.com/zricethezav/gitleaks/v8/config"
+ "github.com/zricethezav/gitleaks/v8/detect"
+)
+
+type ScoreParameters struct {
+ Category RuleCategory
+ RuleType uint8
+}
+
+type Rule struct {
+ Rule config.Rule
+ Tags []string
+ ScoreParameters ScoreParameters
+}
+
+// Copied from https://github.com/gitleaks/gitleaks/blob/463d24618fa42fc7629dc30c9744ebe36c5df1ab/cmd/generate/config/rules/rule.go
+func validate(r config.Rule, truePositives []string, falsePositives []string) *config.Rule {
+ // normalize keywords like in the config package
+ var keywords []string
+ for _, k := range r.Keywords {
+ keywords = append(keywords, strings.ToLower(k))
+ }
+ r.Keywords = keywords
+
+ rules := make(map[string]config.Rule)
+ rules[r.RuleID] = r
+ d := detect.NewDetector(config.Config{
+ Rules: rules,
+ Keywords: keywords,
+ })
+ for _, tp := range truePositives {
+ if len(d.DetectString(tp)) != 1 {
+ log.Fatal(). // lint:ignore This Fatal happens in a test
+ Str("rule", r.RuleID).
+ Str("value", tp).
+ Str("regex", r.Regex.String()).
+ Msg("Failed to Validate. True positive was not detected by regex.")
+ }
+ }
+ for _, fp := range falsePositives {
+ if len(d.DetectString(fp)) != 0 {
+ log.Fatal(). // lint:ignore This Fatal happens in a test
+ Str("rule", r.RuleID).
+ Str("value", fp).
+ Str("regex", r.Regex.String()).
+ Msg("Failed to Validate. False positive was detected by regex.")
+ }
+ }
+ return &r
+}
diff --git a/engine/rules/rules_test.go b/engine/rules/rules_test.go
index 2a36a125..caefc000 100644
--- a/engine/rules/rules_test.go
+++ b/engine/rules/rules_test.go
@@ -1,308 +1,308 @@
-package rules
-
-import (
- "testing"
-
- "github.com/zricethezav/gitleaks/v8/config"
-)
-
-func TestLoadAllRules(t *testing.T) {
- rules := getDefaultRules()
-
- if len(*rules) <= 1 {
- t.Error("no rules were loaded")
- }
-}
-
-func TestLoadAllRules_DuplicateRuleID(t *testing.T) {
- ruleIDMap := make(map[string]bool)
- allRules := getDefaultRules()
-
- for _, rule := range *allRules {
- if _, ok := ruleIDMap[rule.Rule.RuleID]; ok {
- t.Errorf("duplicate rule id found: %s", rule.Rule.RuleID)
- }
-
- ruleIDMap[rule.Rule.RuleID] = true
- }
-}
-
-func Test_FilterRules_SelectRules(t *testing.T) {
- specialRule := HardcodedPassword()
- allRules := *getDefaultRules()
- rulesCount := len(allRules)
-
- tests := []struct {
- name string
- selectedList []string
- ignoreList []string
- specialList []string
- expectedLen int
- }{
- {
- name: "selected flag used for one rule",
- selectedList: []string{allRules[0].Rule.RuleID},
- ignoreList: []string{},
- expectedLen: 1,
- },
- {
- name: "selected flag used for multiple rules",
- selectedList: []string{allRules[0].Rule.RuleID, allRules[1].Rule.RuleID},
- ignoreList: []string{},
- expectedLen: 2,
- },
- {
- name: "ignore flag used for one rule",
- selectedList: []string{},
- ignoreList: []string{allRules[0].Rule.RuleID},
- expectedLen: rulesCount - 1,
- },
- {
- name: "ignore flag used for multiple rules",
- selectedList: []string{},
- ignoreList: []string{allRules[0].Rule.RuleID, allRules[1].Rule.RuleID},
- expectedLen: rulesCount - 2,
- },
- {
- name: "selected and ignore flags used together for different rules",
- selectedList: []string{allRules[0].Rule.RuleID},
- ignoreList: []string{allRules[1].Rule.RuleID},
- expectedLen: 1,
- },
- {
- name: "selected and ignore flags used together for the same rule",
- selectedList: []string{allRules[0].Rule.RuleID},
- ignoreList: []string{allRules[0].Rule.RuleID},
- expectedLen: 0,
- },
- {
- name: "non existent select flag",
- selectedList: []string{"non-existent-tag-name"},
- ignoreList: []string{},
- expectedLen: 0,
- },
- {
- name: "non existent ignore flag",
- selectedList: []string{},
- ignoreList: []string{"non-existent-tag-name"},
- expectedLen: rulesCount,
- },
- {
- name: "no flags",
- selectedList: []string{},
- ignoreList: []string{},
- expectedLen: rulesCount,
- },
- {
- name: "add special rule",
- selectedList: []string{},
- ignoreList: []string{},
- specialList: []string{specialRule.RuleID},
- expectedLen: rulesCount + 1,
- },
- {
- name: "select regular rule and special rule",
- selectedList: []string{allRules[0].Rule.RuleID},
- ignoreList: []string{},
- specialList: []string{specialRule.RuleID},
- expectedLen: 2,
- },
- {
- name: "select regular rule and ignore it- should keep it",
- selectedList: []string{"non-existent-tag-name"},
- ignoreList: []string{specialRule.RuleID},
- specialList: []string{specialRule.RuleID},
- expectedLen: 1,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- secrets := *FilterRules(tt.selectedList, tt.ignoreList, tt.specialList)
-
- if len(secrets) != tt.expectedLen {
- t.Errorf("expected %d rules, but got %d", tt.expectedLen, len(secrets))
- }
- })
- }
-}
-
-func TestSelectRules(t *testing.T) {
- testCases := []struct {
- name string
- allRules *[]Rule
- tags []string
- expectedResult map[string]config.Rule
- }{
- {
- name: "No matching tags",
- allRules: &[]Rule{
- createRule("rule1", "tag1", "tag2"),
- createRule("rule2", "tag3", "tag4"),
- },
- tags: []string{"tag5", "tag6"},
- expectedResult: map[string]config.Rule{},
- },
- {
- name: "Matching rule ID",
- allRules: &[]Rule{
- createRule("rule1", "tag1", "tag2"),
- createRule("rule2", "tag3", "tag4"),
- },
- tags: []string{"rule1"},
- expectedResult: createRules("rule1"),
- },
- {
- name: "Matching tag",
- allRules: &[]Rule{
- createRule("rule1", "tag1", "tag2"),
- createRule("rule2", "tag3", "tag4"),
- },
- tags: []string{"tag2"},
- expectedResult: createRules("rule1"),
- },
- {
- name: "Matching tag and rule ID",
- allRules: &[]Rule{
- createRule("rule1", "tag1", "tag2"),
- createRule("rule2", "tag3", "tag4"),
- },
- tags: []string{"rule1", "tag2"},
- expectedResult: createRules("rule1"),
- },
- {
- name: "Matching multiple tags",
- allRules: &[]Rule{
- createRule("rule1", "tag1", "tag2"),
- createRule("rule2", "tag3", "tag4"),
- createRule("rule3", "tag2", "tag4"),
- },
- tags: []string{"tag2", "tag4"},
- expectedResult: createRules("rule1", "rule2", "rule3"),
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- result := rulesToMap(selectRules(tc.allRules, tc.tags))
-
- if len(result) != len(tc.expectedResult) {
- t.Errorf("Expected %d rules to be applied, but got %d", len(tc.expectedResult), len(result))
- }
-
- for ruleID, expectedRule := range tc.expectedResult {
- if _, ok := result[ruleID]; !ok {
- t.Errorf("Expected rule %s to be applied, but it was not", ruleID)
- } else {
- if result[ruleID].RuleID != expectedRule.RuleID {
- t.Errorf("Expected rule %s to have RuleID %s, but it had RuleID %s", ruleID, expectedRule.RuleID, result[ruleID].RuleID)
- }
- }
- }
- })
- }
-}
-
-func createRule(ruleID string, tags ...string) Rule {
- return Rule{
- Rule: config.Rule{
- RuleID: ruleID,
- },
- Tags: tags,
- }
-}
-
-func createRules(ruleIDs ...string) map[string]config.Rule {
- rules := make(map[string]config.Rule)
- for _, ruleID := range ruleIDs {
- rules[ruleID] = config.Rule{
- RuleID: ruleID,
- }
- }
- return rules
-}
-
-func rulesToMap(rules *[]Rule) map[string]config.Rule {
- rulesMap := make(map[string]config.Rule)
- for _, rule := range *rules {
- rulesMap[rule.Rule.RuleID] = rule.Rule
- }
- return rulesMap
-}
-
-func TestIgnoreRules(t *testing.T) {
- tests := []struct {
- name string
- allRules *[]Rule
- tags []string
- expectedResult map[string]config.Rule
- }{
- {
- name: "Empty list",
- allRules: &[]Rule{
- createRule("rule1", "tag1", "tag2"),
- createRule("rule2", "tag2", "tag3"),
- },
- tags: []string{},
- expectedResult: createRules("rule1", "rule2"),
- },
- {
- name: "Ignore non-existing tag",
- allRules: &[]Rule{
- createRule("rule1", "tag1", "tag2"),
- createRule("rule2", "tag2", "tag3"),
- },
- tags: []string{"non-existing-tag"},
- expectedResult: createRules("rule1", "rule2"),
- },
- {
- name: "Ignore one rule ID",
- allRules: &[]Rule{
- createRule("rule1", "tag1", "tag2"),
- createRule("rule2", "tag2", "tag3"),
- },
- tags: []string{"rule1"},
- expectedResult: createRules("rule2"),
- },
- {
- name: "Ignore one tag",
- allRules: &[]Rule{
- createRule("rule1", "tag1", "tag2"),
- createRule("rule2", "tag2", "tag3"),
- },
- tags: []string{"tag2"},
- expectedResult: map[string]config.Rule{},
- },
- {
- name: "Ignore all tags",
- allRules: &[]Rule{
- createRule("rule1", "tag1", "tag2"),
- createRule("rule2", "tag2", "tag3"),
- },
- tags: []string{"tag1", "tag2", "tag3"},
- expectedResult: map[string]config.Rule{},
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- gotResult := rulesToMap(ignoreRules(tt.allRules, tt.tags))
-
- if len(gotResult) != len(tt.expectedResult) {
- t.Errorf("expected %d rules, but got %d", len(tt.expectedResult), len(gotResult))
- }
-
- for _, rule := range *tt.allRules {
- if _, ok := tt.expectedResult[rule.Rule.RuleID]; ok {
- if _, ok := gotResult[rule.Rule.RuleID]; !ok {
- t.Errorf("expected rule %s to be present, but it was not", rule.Rule.RuleID)
- }
- } else {
- if _, ok := gotResult[rule.Rule.RuleID]; ok {
- t.Errorf("expected rule %s to be ignored, but it was not", rule.Rule.RuleID)
- }
- }
- }
- })
- }
-}
+package rules
+
+import (
+ "testing"
+
+ "github.com/zricethezav/gitleaks/v8/config"
+)
+
+func TestLoadAllRules(t *testing.T) {
+ rules := getDefaultRules()
+
+ if len(*rules) <= 1 {
+ t.Error("no rules were loaded")
+ }
+}
+
+func TestLoadAllRules_DuplicateRuleID(t *testing.T) {
+ ruleIDMap := make(map[string]bool)
+ allRules := getDefaultRules()
+
+ for _, rule := range *allRules {
+ if _, ok := ruleIDMap[rule.Rule.RuleID]; ok {
+ t.Errorf("duplicate rule id found: %s", rule.Rule.RuleID)
+ }
+
+ ruleIDMap[rule.Rule.RuleID] = true
+ }
+}
+
+func Test_FilterRules_SelectRules(t *testing.T) {
+ specialRule := HardcodedPassword()
+ allRules := *getDefaultRules()
+ rulesCount := len(allRules)
+
+ tests := []struct {
+ name string
+ selectedList []string
+ ignoreList []string
+ specialList []string
+ expectedLen int
+ }{
+ {
+ name: "selected flag used for one rule",
+ selectedList: []string{allRules[0].Rule.RuleID},
+ ignoreList: []string{},
+ expectedLen: 1,
+ },
+ {
+ name: "selected flag used for multiple rules",
+ selectedList: []string{allRules[0].Rule.RuleID, allRules[1].Rule.RuleID},
+ ignoreList: []string{},
+ expectedLen: 2,
+ },
+ {
+ name: "ignore flag used for one rule",
+ selectedList: []string{},
+ ignoreList: []string{allRules[0].Rule.RuleID},
+ expectedLen: rulesCount - 1,
+ },
+ {
+ name: "ignore flag used for multiple rules",
+ selectedList: []string{},
+ ignoreList: []string{allRules[0].Rule.RuleID, allRules[1].Rule.RuleID},
+ expectedLen: rulesCount - 2,
+ },
+ {
+ name: "selected and ignore flags used together for different rules",
+ selectedList: []string{allRules[0].Rule.RuleID},
+ ignoreList: []string{allRules[1].Rule.RuleID},
+ expectedLen: 1,
+ },
+ {
+ name: "selected and ignore flags used together for the same rule",
+ selectedList: []string{allRules[0].Rule.RuleID},
+ ignoreList: []string{allRules[0].Rule.RuleID},
+ expectedLen: 0,
+ },
+ {
+ name: "non existent select flag",
+ selectedList: []string{"non-existent-tag-name"},
+ ignoreList: []string{},
+ expectedLen: 0,
+ },
+ {
+ name: "non existent ignore flag",
+ selectedList: []string{},
+ ignoreList: []string{"non-existent-tag-name"},
+ expectedLen: rulesCount,
+ },
+ {
+ name: "no flags",
+ selectedList: []string{},
+ ignoreList: []string{},
+ expectedLen: rulesCount,
+ },
+ {
+ name: "add special rule",
+ selectedList: []string{},
+ ignoreList: []string{},
+ specialList: []string{specialRule.RuleID},
+ expectedLen: rulesCount + 1,
+ },
+ {
+ name: "select regular rule and special rule",
+ selectedList: []string{allRules[0].Rule.RuleID},
+ ignoreList: []string{},
+ specialList: []string{specialRule.RuleID},
+ expectedLen: 2,
+ },
+ {
+ name: "select regular rule and ignore it- should keep it",
+ selectedList: []string{"non-existent-tag-name"},
+ ignoreList: []string{specialRule.RuleID},
+ specialList: []string{specialRule.RuleID},
+ expectedLen: 1,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ secrets := *FilterRules(tt.selectedList, tt.ignoreList, tt.specialList)
+
+ if len(secrets) != tt.expectedLen {
+ t.Errorf("expected %d rules, but got %d", tt.expectedLen, len(secrets))
+ }
+ })
+ }
+}
+
+func TestSelectRules(t *testing.T) {
+ testCases := []struct {
+ name string
+ allRules *[]Rule
+ tags []string
+ expectedResult map[string]config.Rule
+ }{
+ {
+ name: "No matching tags",
+ allRules: &[]Rule{
+ createRule("rule1", "tag1", "tag2"),
+ createRule("rule2", "tag3", "tag4"),
+ },
+ tags: []string{"tag5", "tag6"},
+ expectedResult: map[string]config.Rule{},
+ },
+ {
+ name: "Matching rule ID",
+ allRules: &[]Rule{
+ createRule("rule1", "tag1", "tag2"),
+ createRule("rule2", "tag3", "tag4"),
+ },
+ tags: []string{"rule1"},
+ expectedResult: createRules("rule1"),
+ },
+ {
+ name: "Matching tag",
+ allRules: &[]Rule{
+ createRule("rule1", "tag1", "tag2"),
+ createRule("rule2", "tag3", "tag4"),
+ },
+ tags: []string{"tag2"},
+ expectedResult: createRules("rule1"),
+ },
+ {
+ name: "Matching tag and rule ID",
+ allRules: &[]Rule{
+ createRule("rule1", "tag1", "tag2"),
+ createRule("rule2", "tag3", "tag4"),
+ },
+ tags: []string{"rule1", "tag2"},
+ expectedResult: createRules("rule1"),
+ },
+ {
+ name: "Matching multiple tags",
+ allRules: &[]Rule{
+ createRule("rule1", "tag1", "tag2"),
+ createRule("rule2", "tag3", "tag4"),
+ createRule("rule3", "tag2", "tag4"),
+ },
+ tags: []string{"tag2", "tag4"},
+ expectedResult: createRules("rule1", "rule2", "rule3"),
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.name, func(t *testing.T) {
+ result := rulesToMap(selectRules(tc.allRules, tc.tags))
+
+ if len(result) != len(tc.expectedResult) {
+ t.Errorf("Expected %d rules to be applied, but got %d", len(tc.expectedResult), len(result))
+ }
+
+ for ruleID, expectedRule := range tc.expectedResult {
+ if _, ok := result[ruleID]; !ok {
+ t.Errorf("Expected rule %s to be applied, but it was not", ruleID)
+ } else {
+ if result[ruleID].RuleID != expectedRule.RuleID {
+ t.Errorf("Expected rule %s to have RuleID %s, but it had RuleID %s", ruleID, expectedRule.RuleID, result[ruleID].RuleID)
+ }
+ }
+ }
+ })
+ }
+}
+
+func createRule(ruleID string, tags ...string) Rule {
+ return Rule{
+ Rule: config.Rule{
+ RuleID: ruleID,
+ },
+ Tags: tags,
+ }
+}
+
+func createRules(ruleIDs ...string) map[string]config.Rule {
+ rules := make(map[string]config.Rule)
+ for _, ruleID := range ruleIDs {
+ rules[ruleID] = config.Rule{
+ RuleID: ruleID,
+ }
+ }
+ return rules
+}
+
+func rulesToMap(rules *[]Rule) map[string]config.Rule {
+ rulesMap := make(map[string]config.Rule)
+ for _, rule := range *rules {
+ rulesMap[rule.Rule.RuleID] = rule.Rule
+ }
+ return rulesMap
+}
+
+func TestIgnoreRules(t *testing.T) {
+ tests := []struct {
+ name string
+ allRules *[]Rule
+ tags []string
+ expectedResult map[string]config.Rule
+ }{
+ {
+ name: "Empty list",
+ allRules: &[]Rule{
+ createRule("rule1", "tag1", "tag2"),
+ createRule("rule2", "tag2", "tag3"),
+ },
+ tags: []string{},
+ expectedResult: createRules("rule1", "rule2"),
+ },
+ {
+ name: "Ignore non-existing tag",
+ allRules: &[]Rule{
+ createRule("rule1", "tag1", "tag2"),
+ createRule("rule2", "tag2", "tag3"),
+ },
+ tags: []string{"non-existing-tag"},
+ expectedResult: createRules("rule1", "rule2"),
+ },
+ {
+ name: "Ignore one rule ID",
+ allRules: &[]Rule{
+ createRule("rule1", "tag1", "tag2"),
+ createRule("rule2", "tag2", "tag3"),
+ },
+ tags: []string{"rule1"},
+ expectedResult: createRules("rule2"),
+ },
+ {
+ name: "Ignore one tag",
+ allRules: &[]Rule{
+ createRule("rule1", "tag1", "tag2"),
+ createRule("rule2", "tag2", "tag3"),
+ },
+ tags: []string{"tag2"},
+ expectedResult: map[string]config.Rule{},
+ },
+ {
+ name: "Ignore all tags",
+ allRules: &[]Rule{
+ createRule("rule1", "tag1", "tag2"),
+ createRule("rule2", "tag2", "tag3"),
+ },
+ tags: []string{"tag1", "tag2", "tag3"},
+ expectedResult: map[string]config.Rule{},
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ gotResult := rulesToMap(ignoreRules(tt.allRules, tt.tags))
+
+ if len(gotResult) != len(tt.expectedResult) {
+ t.Errorf("expected %d rules, but got %d", len(tt.expectedResult), len(gotResult))
+ }
+
+ for _, rule := range *tt.allRules {
+ if _, ok := tt.expectedResult[rule.Rule.RuleID]; ok {
+ if _, ok := gotResult[rule.Rule.RuleID]; !ok {
+ t.Errorf("expected rule %s to be present, but it was not", rule.Rule.RuleID)
+ }
+ } else {
+ if _, ok := gotResult[rule.Rule.RuleID]; ok {
+ t.Errorf("expected rule %s to be ignored, but it was not", rule.Rule.RuleID)
+ }
+ }
+ }
+ })
+ }
+}
diff --git a/engine/rules/vault.go b/engine/rules/vault.go
index d6b73211..65ee21ed 100644
--- a/engine/rules/vault.go
+++ b/engine/rules/vault.go
@@ -1,25 +1,25 @@
-package rules
-
-import (
- "github.com/zricethezav/gitleaks/v8/cmd/generate/secrets"
- "github.com/zricethezav/gitleaks/v8/config"
-)
-
-// Using this local version because newer versions of gitleaks have an entropy value, which was set as too high
-// It's here as prevention in case a newer version of gitleaks starts getting used and causes issues on this rule
-// If gitleaks is updated on 2ms and the new version of this rule has entropy, set it to 3.0
-func VaultServiceToken() *config.Rule {
- // define rule
- r := config.Rule{
- Description: "Identified a Vault Service Token, potentially compromising infrastructure security and access to sensitive credentials.",
- RuleID: "vault-service-token",
- Regex: generateUniqueTokenRegex(`hvs\.[a-z0-9_-]{90,100}`, true),
- Keywords: []string{"hvs"},
- }
-
- // validate
- tps := []string{
- generateSampleSecret("vault", "hvs."+secrets.NewSecret(alphaNumericExtendedShort("90"))),
- }
- return validate(r, tps, nil)
-}
+package rules
+
+import (
+ "github.com/zricethezav/gitleaks/v8/cmd/generate/secrets"
+ "github.com/zricethezav/gitleaks/v8/config"
+)
+
+// Using this local version because newer versions of gitleaks have an entropy value, which was set as too high
+// It's here as prevention in case a newer version of gitleaks starts getting used and causes issues on this rule
+// If gitleaks is updated on 2ms and the new version of this rule has entropy, set it to 3.0
+func VaultServiceToken() *config.Rule {
+ // define rule
+ r := config.Rule{
+ Description: "Identified a Vault Service Token, potentially compromising infrastructure security and access to sensitive credentials.",
+ RuleID: "vault-service-token",
+ Regex: generateUniqueTokenRegex(`hvs\.[a-z0-9_-]{90,100}`, true),
+ Keywords: []string{"hvs"},
+ }
+
+ // validate
+ tps := []string{
+ generateSampleSecret("vault", "hvs."+secrets.NewSecret(alphaNumericExtendedShort("90"))),
+ }
+ return validate(r, tps, nil)
+}
diff --git a/engine/score/score.go b/engine/score/score.go
index 8045ff6e..7390d327 100644
--- a/engine/score/score.go
+++ b/engine/score/score.go
@@ -1,72 +1,72 @@
-package score
-
-import (
- "github.com/checkmarx/2ms/engine/rules"
- "github.com/checkmarx/2ms/lib/secrets"
- "math"
-)
-
-func getCategoryScore(category rules.RuleCategory) uint8 {
- CategoryScore := map[rules.RuleCategory]uint8{
- rules.CategoryAuthenticationAndAuthorization: 4,
- rules.CategoryCryptocurrencyExchange: 4,
- rules.CategoryFinancialServices: 4,
- rules.CategoryPaymentProcessing: 4,
- rules.CategorySecurity: 4,
- rules.CategoryAPIAccess: 3,
- rules.CategoryCICD: 3,
- rules.CategoryCloudPlatform: 3,
- rules.CategoryDatabaseAsAService: 3,
- rules.CategoryDevelopmentPlatform: 3,
- rules.CategoryEmailDeliveryService: 3,
- rules.CategoryGeneralOrUnknown: 3,
- rules.CategoryInfrastructureAsCode: 3,
- rules.CategoryPackageManagement: 3,
- rules.CategorySourceCodeManagement: 3,
- rules.CategoryWebHostingAndDeployment: 3,
- rules.CategoryBackgroundProcessingService: 2,
- rules.CategoryCDN: 2,
- rules.CategoryContentManagementSystem: 2,
- rules.CategoryCustomerSupport: 2,
- rules.CategoryDataAnalytics: 2,
- rules.CategoryFileStorageAndSharing: 2,
- rules.CategoryIoTPlatform: 2,
- rules.CategoryMappingAndLocationServices: 2,
- rules.CategoryNetworking: 2,
- rules.CategoryPhotoSharing: 2,
- rules.CategorySaaS: 2,
- rules.CategoryShipping: 2,
- rules.CategorySoftwareDevelopment: 2,
- rules.CategoryAIAndMachineLearning: 1,
- rules.CategoryApplicationMonitoring: 1,
- rules.CategoryECommercePlatform: 1,
- rules.CategoryMarketingAutomation: 1,
- rules.CategoryNewsAndMedia: 1,
- rules.CategoryOnlineSurveyPlatform: 1,
- rules.CategoryProjectManagement: 1,
- rules.CategorySearchService: 1,
- rules.CategorySocialMedia: 1,
- }
- return CategoryScore[category]
-}
-
-func getValidityScore(baseRiskScore float64, validationStatus secrets.ValidationResult) float64 {
- switch validationStatus {
- case secrets.ValidResult:
- return math.Min(1, 4-baseRiskScore)
- case secrets.InvalidResult:
- return math.Max(-1, 1-baseRiskScore)
- }
- return 0.0
-}
-
-func GetBaseRiskScore(category rules.RuleCategory, ruleType uint8) float64 {
- categoryScore := getCategoryScore(category)
- return float64(categoryScore)*0.6 + float64(ruleType)*0.4
-}
-
-func GetCvssScore(baseRiskScore float64, validationStatus secrets.ValidationResult) float64 {
- validityScore := getValidityScore(baseRiskScore, validationStatus)
- cvssScore := (baseRiskScore+validityScore-1)*3 + 1
- return math.Round(cvssScore*10) / 10
-}
+package score
+
+import (
+ "github.com/checkmarx/2ms/engine/rules"
+ "github.com/checkmarx/2ms/lib/secrets"
+ "math"
+)
+
+func getCategoryScore(category rules.RuleCategory) uint8 {
+ CategoryScore := map[rules.RuleCategory]uint8{
+ rules.CategoryAuthenticationAndAuthorization: 4,
+ rules.CategoryCryptocurrencyExchange: 4,
+ rules.CategoryFinancialServices: 4,
+ rules.CategoryPaymentProcessing: 4,
+ rules.CategorySecurity: 4,
+ rules.CategoryAPIAccess: 3,
+ rules.CategoryCICD: 3,
+ rules.CategoryCloudPlatform: 3,
+ rules.CategoryDatabaseAsAService: 3,
+ rules.CategoryDevelopmentPlatform: 3,
+ rules.CategoryEmailDeliveryService: 3,
+ rules.CategoryGeneralOrUnknown: 3,
+ rules.CategoryInfrastructureAsCode: 3,
+ rules.CategoryPackageManagement: 3,
+ rules.CategorySourceCodeManagement: 3,
+ rules.CategoryWebHostingAndDeployment: 3,
+ rules.CategoryBackgroundProcessingService: 2,
+ rules.CategoryCDN: 2,
+ rules.CategoryContentManagementSystem: 2,
+ rules.CategoryCustomerSupport: 2,
+ rules.CategoryDataAnalytics: 2,
+ rules.CategoryFileStorageAndSharing: 2,
+ rules.CategoryIoTPlatform: 2,
+ rules.CategoryMappingAndLocationServices: 2,
+ rules.CategoryNetworking: 2,
+ rules.CategoryPhotoSharing: 2,
+ rules.CategorySaaS: 2,
+ rules.CategoryShipping: 2,
+ rules.CategorySoftwareDevelopment: 2,
+ rules.CategoryAIAndMachineLearning: 1,
+ rules.CategoryApplicationMonitoring: 1,
+ rules.CategoryECommercePlatform: 1,
+ rules.CategoryMarketingAutomation: 1,
+ rules.CategoryNewsAndMedia: 1,
+ rules.CategoryOnlineSurveyPlatform: 1,
+ rules.CategoryProjectManagement: 1,
+ rules.CategorySearchService: 1,
+ rules.CategorySocialMedia: 1,
+ }
+ return CategoryScore[category]
+}
+
+func getValidityScore(baseRiskScore float64, validationStatus secrets.ValidationResult) float64 {
+ switch validationStatus {
+ case secrets.ValidResult:
+ return math.Min(1, 4-baseRiskScore)
+ case secrets.InvalidResult:
+ return math.Max(-1, 1-baseRiskScore)
+ }
+ return 0.0
+}
+
+func GetBaseRiskScore(category rules.RuleCategory, ruleType uint8) float64 {
+ categoryScore := getCategoryScore(category)
+ return float64(categoryScore)*0.6 + float64(ruleType)*0.4
+}
+
+func GetCvssScore(baseRiskScore float64, validationStatus secrets.ValidationResult) float64 {
+ validityScore := getValidityScore(baseRiskScore, validationStatus)
+ cvssScore := (baseRiskScore+validityScore-1)*3 + 1
+ return math.Round(cvssScore*10) / 10
+}
diff --git a/engine/score/score_test.go b/engine/score/score_test.go
index 4515a4a0..46e56547 100644
--- a/engine/score/score_test.go
+++ b/engine/score/score_test.go
@@ -1,234 +1,235 @@
-package score_test
-
-import (
- . "github.com/checkmarx/2ms/engine"
- "github.com/checkmarx/2ms/engine/rules"
- "github.com/checkmarx/2ms/engine/score"
- "github.com/checkmarx/2ms/lib/secrets"
- "github.com/stretchr/testify/assert"
- ruleConfig "github.com/zricethezav/gitleaks/v8/cmd/generate/config/rules"
- "sync"
- "testing"
-)
-
-func TestScore(t *testing.T) {
- specialRule := rules.HardcodedPassword()
- allRules := *rules.FilterRules([]string{}, []string{}, []string{specialRule.RuleID})
-
- engineConfig := EngineConfig{SpecialList: []string{specialRule.RuleID}}
- engine, err := Init(engineConfig)
- assert.NoError(t, err)
-
- expectedCvssScores := map[string][3]float64{ // ruleID -> Valid, Invalid, Unknown
- ruleConfig.AdafruitAPIKey().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.AdobeClientID().RuleID: {5.8, 1, 2.8},
- ruleConfig.AdobeClientSecret().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.AgeSecretKey().RuleID: {10, 5.2, 8.2},
- ruleConfig.Airtable().RuleID: {10, 5.2, 8.2},
- ruleConfig.AlgoliaApiKey().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.AlibabaAccessKey().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.AlibabaSecretKey().RuleID: {10, 5.2, 8.2},
- ruleConfig.AsanaClientID().RuleID: {4, 1, 1},
- ruleConfig.AsanaClientSecret().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.Atlassian().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.Authress().RuleID: {10, 7, 10},
- ruleConfig.AWS().RuleID: {10, 7, 10},
- ruleConfig.BitBucketClientID().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.BitBucketClientSecret().RuleID: {10, 5.2, 8.2},
- ruleConfig.BittrexAccessKey().RuleID: {10, 7, 10},
- ruleConfig.BittrexSecretKey().RuleID: {10, 7, 10},
- ruleConfig.Beamer().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.CodecovAccessToken().RuleID: {10, 7, 10},
- ruleConfig.CoinbaseAccessToken().RuleID: {10, 7, 10},
- ruleConfig.Clojars().RuleID: {10, 5.2, 8.2},
- ruleConfig.ConfluentAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.ConfluentSecretKey().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.Contentful().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.Databricks().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.DatadogtokenAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.DefinedNetworkingAPIToken().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.DigitalOceanPAT().RuleID: {10, 5.2, 8.2},
- ruleConfig.DigitalOceanOAuthToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.DigitalOceanRefreshToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.DiscordAPIToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.DiscordClientID().RuleID: {4, 1, 1},
- ruleConfig.DiscordClientSecret().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.Doppler().RuleID: {10, 5.2, 8.2},
- ruleConfig.DropBoxAPISecret().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.DropBoxShortLivedAPIToken().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.DropBoxLongLivedAPIToken().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.DroneciAccessToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.Duffel().RuleID: {10, 5.2, 8.2},
- ruleConfig.Dynatrace().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.EasyPost().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.EasyPostTestAPI().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.EtsyAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.Facebook().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.FastlyAPIToken().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.FinicityClientSecret().RuleID: {10, 7, 10},
- ruleConfig.FinicityAPIToken().RuleID: {10, 7, 10},
- ruleConfig.FlickrAccessToken().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.FinnhubAccessToken().RuleID: {10, 7, 10},
- ruleConfig.FlutterwavePublicKey().RuleID: {10, 7, 10},
- ruleConfig.FlutterwaveSecretKey().RuleID: {10, 7, 10},
- ruleConfig.FlutterwaveEncKey().RuleID: {10, 7, 10},
- ruleConfig.FrameIO().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.FreshbooksAccessToken().RuleID: {10, 7, 10},
- ruleConfig.GCPAPIKey().RuleID: {10, 5.2, 8.2},
- ruleConfig.GenericCredential().RuleID: {10, 5.2, 8.2},
- ruleConfig.GitHubPat().RuleID: {10, 5.2, 8.2},
- ruleConfig.GitHubFineGrainedPat().RuleID: {10, 5.2, 8.2},
- ruleConfig.GitHubOauth().RuleID: {10, 7, 10},
- ruleConfig.GitHubApp().RuleID: {10, 5.2, 8.2},
- ruleConfig.GitHubRefresh().RuleID: {10, 7, 10},
- ruleConfig.GitlabPat().RuleID: {10, 5.2, 8.2},
- ruleConfig.GitlabPipelineTriggerToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.GitlabRunnerRegistrationToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.GitterAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.GoCardless().RuleID: {10, 7, 10},
- ruleConfig.GrafanaApiKey().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.GrafanaCloudApiToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.GrafanaServiceAccountToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.Hashicorp().RuleID: {10, 5.2, 8.2},
- ruleConfig.HashicorpField().RuleID: {10, 5.2, 8.2},
- ruleConfig.Heroku().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.HubSpot().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.HuggingFaceAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.HuggingFaceOrganizationApiToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.InfracostAPIToken().RuleID: {10, 7, 10},
- ruleConfig.Intercom().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.JFrogAPIKey().RuleID: {10, 5.2, 8.2},
- ruleConfig.JFrogIdentityToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.JWT().RuleID: {10, 5.2, 8.2},
- ruleConfig.JWTBase64().RuleID: {10, 5.2, 8.2},
- ruleConfig.KrakenAccessToken().RuleID: {10, 7, 10},
- ruleConfig.KucoinAccessToken().RuleID: {10, 7, 10},
- ruleConfig.KucoinSecretKey().RuleID: {10, 7, 10},
- ruleConfig.LaunchDarklyAccessToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.LinearAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.LinearClientSecret().RuleID: {10, 7, 10},
- ruleConfig.LinkedinClientID().RuleID: {4, 1, 1},
- ruleConfig.LinkedinClientSecret().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.LobAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.LobPubAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.MailChimp().RuleID: {10, 5.2, 8.2},
- ruleConfig.MailGunPubAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.MailGunPrivateAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.MailGunSigningKey().RuleID: {10, 5.2, 8.2},
- ruleConfig.MapBox().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.MattermostAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.MessageBirdAPIToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.MessageBirdClientID().RuleID: {4, 1, 1},
- ruleConfig.NetlifyAccessToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.NewRelicUserID().RuleID: {4, 1, 1},
- ruleConfig.NewRelicUserKey().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.NewRelicBrowserAPIKey().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.NPM().RuleID: {10, 5.2, 8.2},
- ruleConfig.NytimesAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.OktaAccessToken().RuleID: {10, 7, 10},
- ruleConfig.OpenAI().RuleID: {7.6, 1.6, 4.6},
- rules.PlaidAccessID().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.PlaidSecretKey().RuleID: {10, 7, 10},
- ruleConfig.PlaidAccessToken().RuleID: {10, 7, 10},
- ruleConfig.PlanetScalePassword().RuleID: {10, 5.2, 8.2},
- ruleConfig.PlanetScaleAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.PlanetScaleOAuthToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.PostManAPI().RuleID: {10, 5.2, 8.2},
- ruleConfig.Prefect().RuleID: {10, 5.2, 8.2},
- rules.PrivateKey().RuleID: {10, 5.2, 8.2},
- ruleConfig.PulumiAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.PyPiUploadToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.RapidAPIAccessToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.ReadMe().RuleID: {10, 5.2, 8.2},
- ruleConfig.RubyGemsAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.ScalingoAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.SendbirdAccessID().RuleID: {4, 1, 1},
- ruleConfig.SendbirdAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SendGridAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.SendInBlueAPIToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.SentryAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.ShippoAPIToken().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.ShopifyAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.ShopifyCustomAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.ShopifyPrivateAppAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.ShopifySharedSecret().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SidekiqSecret().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.SidekiqSensitiveUrl().RuleID: {9.4, 3.4, 6.4},
- ruleConfig.SlackBotToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SlackAppLevelToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SlackLegacyToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SlackUserToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SlackConfigurationToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SlackConfigurationRefreshToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SlackLegacyBotToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SlackLegacyWorkspaceToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SlackWebHookUrl().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.StripeAccessToken().RuleID: {10, 7, 10},
- ruleConfig.SquareAccessToken().RuleID: {10, 7, 10},
- ruleConfig.SquareSpaceAccessToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.SumoLogicAccessID().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.SumoLogicAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.Snyk().RuleID: {10, 7, 10},
- ruleConfig.TeamsWebhook().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.TelegramBotToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.TravisCIAccessToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.Twilio().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.TwitchAPIToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.TwitterAPIKey().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.TwitterAPISecret().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.TwitterAccessToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.TwitterAccessSecret().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.TwitterBearerToken().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.Typeform().RuleID: {7.6, 1.6, 4.6},
- ruleConfig.VaultBatchToken().RuleID: {10, 7, 10},
- rules.VaultServiceToken().RuleID: {10, 7, 10},
- ruleConfig.YandexAPIKey().RuleID: {10, 5.2, 8.2},
- ruleConfig.YandexAWSAccessToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.YandexAccessToken().RuleID: {10, 5.2, 8.2},
- ruleConfig.ZendeskSecretKey().RuleID: {9.4, 3.4, 6.4},
- rules.AuthenticatedURL().RuleID: {10, 5.2, 8.2},
- specialRule.RuleID: {10, 5.2, 8.2},
- }
- for _, rule := range allRules {
- expectedRuleScores := expectedCvssScores[rule.Rule.RuleID]
- baseRiskScore := score.GetBaseRiskScore(rule.ScoreParameters.Category, rule.ScoreParameters.RuleType)
- ruleBaseRiskScore := engine.GetRuleBaseRiskScore(rule.Rule.RuleID)
- assert.Equal(t, ruleBaseRiskScore, baseRiskScore, "rule: %s", rule.Rule.RuleID)
- assert.Equal(t, expectedRuleScores[0], score.GetCvssScore(baseRiskScore, secrets.ValidResult), "rule: %s", rule.Rule.RuleID)
- assert.Equal(t, expectedRuleScores[1], score.GetCvssScore(baseRiskScore, secrets.InvalidResult), "rule: %s", rule.Rule.RuleID)
- assert.Equal(t, expectedRuleScores[2], score.GetCvssScore(baseRiskScore, secrets.UnknownResult), "rule: %s", rule.Rule.RuleID)
- }
-
- var allSecrets []*secrets.Secret
- for _, rule := range allRules {
- var secretValid, secretInvalid, secretUnknown secrets.Secret
- secretValid.RuleID = rule.Rule.RuleID
- secretValid.ValidationStatus = secrets.ValidResult
- secretInvalid.RuleID = rule.Rule.RuleID
- secretInvalid.ValidationStatus = secrets.InvalidResult
- secretUnknown.RuleID = rule.Rule.RuleID
- secretUnknown.ValidationStatus = secrets.UnknownResult
- allSecrets = append(allSecrets, &secretValid, &secretInvalid, &secretUnknown)
- }
- for _, secret := range allSecrets {
- var wg sync.WaitGroup
- wg.Add(2)
- expectedRuleScores := expectedCvssScores[secret.RuleID]
- validityIndex := getValidityIndex(secret.ValidationStatus)
- unknownIndex := getValidityIndex(secrets.UnknownResult)
- engine.Score(secret, true, &wg)
- assert.Equal(t, expectedRuleScores[validityIndex], secret.CvssScore, "rule: %s", secret.RuleID)
- engine.Score(secret, false, &wg)
- assert.Equal(t, expectedRuleScores[unknownIndex], secret.CvssScore, "rule: %s", secret.RuleID)
- }
-}
-
-func getValidityIndex(validity secrets.ValidationResult) int {
- switch validity {
- case secrets.ValidResult:
- return 0
- case secrets.InvalidResult:
- return 1
- }
- return 2
-}
+package score_test
+
+import (
+ "sync"
+ "testing"
+
+ . "github.com/checkmarx/2ms/engine"
+ "github.com/checkmarx/2ms/engine/rules"
+ "github.com/checkmarx/2ms/engine/score"
+ "github.com/checkmarx/2ms/lib/secrets"
+ "github.com/stretchr/testify/assert"
+ ruleConfig "github.com/zricethezav/gitleaks/v8/cmd/generate/config/rules"
+)
+
+// TestScore pins the expected CVSS scores (valid / invalid / unknown) for every
+// rule and checks them twice: once through the score package's pure functions,
+// and once through engine.Score applied to synthetic secrets.
+func TestScore(t *testing.T) {
+ // HardcodedPassword is a "special" rule that must be requested explicitly.
+ specialRule := rules.HardcodedPassword()
+ allRules := *rules.FilterRules([]string{}, []string{}, []string{specialRule.RuleID})
+
+ engineConfig := EngineConfig{SpecialList: []string{specialRule.RuleID}}
+ engine, err := Init(engineConfig)
+ assert.NoError(t, err)
+
+ expectedCvssScores := map[string][3]float64{ // ruleID -> Valid, Invalid, Unknown
+ ruleConfig.AdafruitAPIKey().RuleID:                 {9.4, 3.4, 6.4},
+ ruleConfig.AdobeClientID().RuleID:                  {5.8, 1, 2.8},
+ ruleConfig.AdobeClientSecret().RuleID:              {9.4, 3.4, 6.4},
+ ruleConfig.AgeSecretKey().RuleID:                   {10, 5.2, 8.2},
+ ruleConfig.Airtable().RuleID:                       {10, 5.2, 8.2},
+ ruleConfig.AlgoliaApiKey().RuleID:                  {7.6, 1.6, 4.6},
+ ruleConfig.AlibabaAccessKey().RuleID:               {7.6, 1.6, 4.6},
+ ruleConfig.AlibabaSecretKey().RuleID:               {10, 5.2, 8.2},
+ ruleConfig.AsanaClientID().RuleID:                  {4, 1, 1},
+ ruleConfig.AsanaClientSecret().RuleID:              {7.6, 1.6, 4.6},
+ ruleConfig.Atlassian().RuleID:                      {9.4, 3.4, 6.4},
+ ruleConfig.Authress().RuleID:                       {10, 7, 10},
+ ruleConfig.AWS().RuleID:                            {10, 7, 10},
+ ruleConfig.BitBucketClientID().RuleID:              {7.6, 1.6, 4.6},
+ ruleConfig.BitBucketClientSecret().RuleID:          {10, 5.2, 8.2},
+ ruleConfig.BittrexAccessKey().RuleID:               {10, 7, 10},
+ ruleConfig.BittrexSecretKey().RuleID:               {10, 7, 10},
+ ruleConfig.Beamer().RuleID:                         {7.6, 1.6, 4.6},
+ ruleConfig.CodecovAccessToken().RuleID:             {10, 7, 10},
+ ruleConfig.CoinbaseAccessToken().RuleID:            {10, 7, 10},
+ ruleConfig.Clojars().RuleID:                        {10, 5.2, 8.2},
+ ruleConfig.ConfluentAccessToken().RuleID:           {7.6, 1.6, 4.6},
+ ruleConfig.ConfluentSecretKey().RuleID:             {7.6, 1.6, 4.6},
+ ruleConfig.Contentful().RuleID:                     {9.4, 3.4, 6.4},
+ ruleConfig.Databricks().RuleID:                     {9.4, 3.4, 6.4},
+ ruleConfig.DatadogtokenAccessToken().RuleID:        {7.6, 1.6, 4.6},
+ ruleConfig.DefinedNetworkingAPIToken().RuleID:      {9.4, 3.4, 6.4},
+ ruleConfig.DigitalOceanPAT().RuleID:                {10, 5.2, 8.2},
+ ruleConfig.DigitalOceanOAuthToken().RuleID:         {10, 5.2, 8.2},
+ ruleConfig.DigitalOceanRefreshToken().RuleID:       {10, 5.2, 8.2},
+ ruleConfig.DiscordAPIToken().RuleID:                {7.6, 1.6, 4.6},
+ ruleConfig.DiscordClientID().RuleID:                {4, 1, 1},
+ ruleConfig.DiscordClientSecret().RuleID:            {7.6, 1.6, 4.6},
+ ruleConfig.Doppler().RuleID:                        {10, 5.2, 8.2},
+ ruleConfig.DropBoxAPISecret().RuleID:               {9.4, 3.4, 6.4},
+ ruleConfig.DropBoxShortLivedAPIToken().RuleID:      {9.4, 3.4, 6.4},
+ ruleConfig.DropBoxLongLivedAPIToken().RuleID:       {9.4, 3.4, 6.4},
+ ruleConfig.DroneciAccessToken().RuleID:             {10, 5.2, 8.2},
+ ruleConfig.Duffel().RuleID:                         {10, 5.2, 8.2},
+ ruleConfig.Dynatrace().RuleID:                      {7.6, 1.6, 4.6},
+ ruleConfig.EasyPost().RuleID:                       {9.4, 3.4, 6.4},
+ ruleConfig.EasyPostTestAPI().RuleID:                {9.4, 3.4, 6.4},
+ ruleConfig.EtsyAccessToken().RuleID:                {7.6, 1.6, 4.6},
+ ruleConfig.Facebook().RuleID:                       {7.6, 1.6, 4.6},
+ ruleConfig.FastlyAPIToken().RuleID:                 {9.4, 3.4, 6.4},
+ ruleConfig.FinicityClientSecret().RuleID:           {10, 7, 10},
+ ruleConfig.FinicityAPIToken().RuleID:               {10, 7, 10},
+ ruleConfig.FlickrAccessToken().RuleID:              {9.4, 3.4, 6.4},
+ ruleConfig.FinnhubAccessToken().RuleID:             {10, 7, 10},
+ ruleConfig.FlutterwavePublicKey().RuleID:           {10, 7, 10},
+ ruleConfig.FlutterwaveSecretKey().RuleID:           {10, 7, 10},
+ ruleConfig.FlutterwaveEncKey().RuleID:              {10, 7, 10},
+ ruleConfig.FrameIO().RuleID:                        {7.6, 1.6, 4.6},
+ ruleConfig.FreshbooksAccessToken().RuleID:          {10, 7, 10},
+ ruleConfig.GCPAPIKey().RuleID:                      {10, 5.2, 8.2},
+ ruleConfig.GenericCredential().RuleID:              {10, 5.2, 8.2},
+ ruleConfig.GitHubPat().RuleID:                      {10, 5.2, 8.2},
+ ruleConfig.GitHubFineGrainedPat().RuleID:           {10, 5.2, 8.2},
+ ruleConfig.GitHubOauth().RuleID:                    {10, 7, 10},
+ ruleConfig.GitHubApp().RuleID:                      {10, 5.2, 8.2},
+ ruleConfig.GitHubRefresh().RuleID:                  {10, 7, 10},
+ ruleConfig.GitlabPat().RuleID:                      {10, 5.2, 8.2},
+ ruleConfig.GitlabPipelineTriggerToken().RuleID:     {10, 5.2, 8.2},
+ ruleConfig.GitlabRunnerRegistrationToken().RuleID:  {10, 5.2, 8.2},
+ ruleConfig.GitterAccessToken().RuleID:              {7.6, 1.6, 4.6},
+ ruleConfig.GoCardless().RuleID:                     {10, 7, 10},
+ ruleConfig.GrafanaApiKey().RuleID:                  {7.6, 1.6, 4.6},
+ ruleConfig.GrafanaCloudApiToken().RuleID:           {7.6, 1.6, 4.6},
+ ruleConfig.GrafanaServiceAccountToken().RuleID:     {7.6, 1.6, 4.6},
+ ruleConfig.Hashicorp().RuleID:                      {10, 5.2, 8.2},
+ ruleConfig.HashicorpField().RuleID:                 {10, 5.2, 8.2},
+ ruleConfig.Heroku().RuleID:                         {9.4, 3.4, 6.4},
+ ruleConfig.HubSpot().RuleID:                        {7.6, 1.6, 4.6},
+ ruleConfig.HuggingFaceAccessToken().RuleID:         {7.6, 1.6, 4.6},
+ ruleConfig.HuggingFaceOrganizationApiToken().RuleID: {7.6, 1.6, 4.6},
+ ruleConfig.InfracostAPIToken().RuleID:              {10, 7, 10},
+ ruleConfig.Intercom().RuleID:                       {9.4, 3.4, 6.4},
+ ruleConfig.JFrogAPIKey().RuleID:                    {10, 5.2, 8.2},
+ ruleConfig.JFrogIdentityToken().RuleID:             {10, 5.2, 8.2},
+ ruleConfig.JWT().RuleID:                            {10, 5.2, 8.2},
+ ruleConfig.JWTBase64().RuleID:                      {10, 5.2, 8.2},
+ ruleConfig.KrakenAccessToken().RuleID:              {10, 7, 10},
+ ruleConfig.KucoinAccessToken().RuleID:              {10, 7, 10},
+ ruleConfig.KucoinSecretKey().RuleID:                {10, 7, 10},
+ ruleConfig.LaunchDarklyAccessToken().RuleID:        {10, 5.2, 8.2},
+ ruleConfig.LinearAPIToken().RuleID:                 {10, 5.2, 8.2},
+ ruleConfig.LinearClientSecret().RuleID:             {10, 7, 10},
+ ruleConfig.LinkedinClientID().RuleID:               {4, 1, 1},
+ ruleConfig.LinkedinClientSecret().RuleID:           {7.6, 1.6, 4.6},
+ ruleConfig.LobAPIToken().RuleID:                    {10, 5.2, 8.2},
+ ruleConfig.LobPubAPIToken().RuleID:                 {10, 5.2, 8.2},
+ ruleConfig.MailChimp().RuleID:                      {10, 5.2, 8.2},
+ ruleConfig.MailGunPubAPIToken().RuleID:             {10, 5.2, 8.2},
+ ruleConfig.MailGunPrivateAPIToken().RuleID:         {10, 5.2, 8.2},
+ ruleConfig.MailGunSigningKey().RuleID:              {10, 5.2, 8.2},
+ ruleConfig.MapBox().RuleID:                         {9.4, 3.4, 6.4},
+ ruleConfig.MattermostAccessToken().RuleID:          {7.6, 1.6, 4.6},
+ ruleConfig.MessageBirdAPIToken().RuleID:            {7.6, 1.6, 4.6},
+ ruleConfig.MessageBirdClientID().RuleID:            {4, 1, 1},
+ ruleConfig.NetlifyAccessToken().RuleID:             {10, 5.2, 8.2},
+ ruleConfig.NewRelicUserID().RuleID:                 {4, 1, 1},
+ ruleConfig.NewRelicUserKey().RuleID:                {7.6, 1.6, 4.6},
+ ruleConfig.NewRelicBrowserAPIKey().RuleID:          {7.6, 1.6, 4.6},
+ ruleConfig.NPM().RuleID:                            {10, 5.2, 8.2},
+ ruleConfig.NytimesAccessToken().RuleID:             {7.6, 1.6, 4.6},
+ ruleConfig.OktaAccessToken().RuleID:                {10, 7, 10},
+ ruleConfig.OpenAI().RuleID:                         {7.6, 1.6, 4.6},
+ rules.PlaidAccessID().RuleID:                       {9.4, 3.4, 6.4},
+ ruleConfig.PlaidSecretKey().RuleID:                 {10, 7, 10},
+ ruleConfig.PlaidAccessToken().RuleID:               {10, 7, 10},
+ ruleConfig.PlanetScalePassword().RuleID:            {10, 5.2, 8.2},
+ ruleConfig.PlanetScaleAPIToken().RuleID:            {10, 5.2, 8.2},
+ ruleConfig.PlanetScaleOAuthToken().RuleID:          {10, 5.2, 8.2},
+ ruleConfig.PostManAPI().RuleID:                     {10, 5.2, 8.2},
+ ruleConfig.Prefect().RuleID:                        {10, 5.2, 8.2},
+ rules.PrivateKey().RuleID:                          {10, 5.2, 8.2},
+ ruleConfig.PulumiAPIToken().RuleID:                 {10, 5.2, 8.2},
+ ruleConfig.PyPiUploadToken().RuleID:                {10, 5.2, 8.2},
+ ruleConfig.RapidAPIAccessToken().RuleID:            {10, 5.2, 8.2},
+ ruleConfig.ReadMe().RuleID:                         {10, 5.2, 8.2},
+ ruleConfig.RubyGemsAPIToken().RuleID:               {10, 5.2, 8.2},
+ ruleConfig.ScalingoAPIToken().RuleID:               {10, 5.2, 8.2},
+ ruleConfig.SendbirdAccessID().RuleID:               {4, 1, 1},
+ ruleConfig.SendbirdAccessToken().RuleID:            {7.6, 1.6, 4.6},
+ ruleConfig.SendGridAPIToken().RuleID:               {10, 5.2, 8.2},
+ ruleConfig.SendInBlueAPIToken().RuleID:             {10, 5.2, 8.2},
+ ruleConfig.SentryAccessToken().RuleID:              {7.6, 1.6, 4.6},
+ ruleConfig.ShippoAPIToken().RuleID:                 {9.4, 3.4, 6.4},
+ ruleConfig.ShopifyAccessToken().RuleID:             {7.6, 1.6, 4.6},
+ ruleConfig.ShopifyCustomAccessToken().RuleID:       {7.6, 1.6, 4.6},
+ ruleConfig.ShopifyPrivateAppAccessToken().RuleID:   {7.6, 1.6, 4.6},
+ ruleConfig.ShopifySharedSecret().RuleID:            {7.6, 1.6, 4.6},
+ ruleConfig.SidekiqSecret().RuleID:                  {9.4, 3.4, 6.4},
+ ruleConfig.SidekiqSensitiveUrl().RuleID:            {9.4, 3.4, 6.4},
+ ruleConfig.SlackBotToken().RuleID:                  {7.6, 1.6, 4.6},
+ ruleConfig.SlackAppLevelToken().RuleID:             {7.6, 1.6, 4.6},
+ ruleConfig.SlackLegacyToken().RuleID:               {7.6, 1.6, 4.6},
+ ruleConfig.SlackUserToken().RuleID:                 {7.6, 1.6, 4.6},
+ ruleConfig.SlackConfigurationToken().RuleID:        {7.6, 1.6, 4.6},
+ ruleConfig.SlackConfigurationRefreshToken().RuleID: {7.6, 1.6, 4.6},
+ ruleConfig.SlackLegacyBotToken().RuleID:            {7.6, 1.6, 4.6},
+ ruleConfig.SlackLegacyWorkspaceToken().RuleID:      {7.6, 1.6, 4.6},
+ ruleConfig.SlackWebHookUrl().RuleID:                {7.6, 1.6, 4.6},
+ ruleConfig.StripeAccessToken().RuleID:              {10, 7, 10},
+ ruleConfig.SquareAccessToken().RuleID:              {10, 7, 10},
+ ruleConfig.SquareSpaceAccessToken().RuleID:         {10, 5.2, 8.2},
+ ruleConfig.SumoLogicAccessID().RuleID:              {7.6, 1.6, 4.6},
+ ruleConfig.SumoLogicAccessToken().RuleID:           {7.6, 1.6, 4.6},
+ ruleConfig.Snyk().RuleID:                           {10, 7, 10},
+ ruleConfig.TeamsWebhook().RuleID:                   {7.6, 1.6, 4.6},
+ ruleConfig.TelegramBotToken().RuleID:               {7.6, 1.6, 4.6},
+ ruleConfig.TravisCIAccessToken().RuleID:            {10, 5.2, 8.2},
+ ruleConfig.Twilio().RuleID:                         {7.6, 1.6, 4.6},
+ ruleConfig.TwitchAPIToken().RuleID:                 {7.6, 1.6, 4.6},
+ ruleConfig.TwitterAPIKey().RuleID:                  {7.6, 1.6, 4.6},
+ ruleConfig.TwitterAPISecret().RuleID:               {7.6, 1.6, 4.6},
+ ruleConfig.TwitterAccessToken().RuleID:             {7.6, 1.6, 4.6},
+ ruleConfig.TwitterAccessSecret().RuleID:            {7.6, 1.6, 4.6},
+ ruleConfig.TwitterBearerToken().RuleID:             {7.6, 1.6, 4.6},
+ ruleConfig.Typeform().RuleID:                       {7.6, 1.6, 4.6},
+ ruleConfig.VaultBatchToken().RuleID:                {10, 7, 10},
+ rules.VaultServiceToken().RuleID:                   {10, 7, 10},
+ ruleConfig.YandexAPIKey().RuleID:                   {10, 5.2, 8.2},
+ ruleConfig.YandexAWSAccessToken().RuleID:           {10, 5.2, 8.2},
+ ruleConfig.YandexAccessToken().RuleID:              {10, 5.2, 8.2},
+ ruleConfig.ZendeskSecretKey().RuleID:               {9.4, 3.4, 6.4},
+ rules.AuthenticatedURL().RuleID:                    {10, 5.2, 8.2},
+ specialRule.RuleID:                                 {10, 5.2, 8.2},
+ }
+ // First pass: the pure scoring functions must agree with the engine's cached
+ // base risk score, and the derived CVSS values must match the pinned table.
+ for _, rule := range allRules {
+ expectedRuleScores := expectedCvssScores[rule.Rule.RuleID]
+ baseRiskScore := score.GetBaseRiskScore(rule.ScoreParameters.Category, rule.ScoreParameters.RuleType)
+ ruleBaseRiskScore := engine.GetRuleBaseRiskScore(rule.Rule.RuleID)
+ assert.Equal(t, ruleBaseRiskScore, baseRiskScore, "rule: %s", rule.Rule.RuleID)
+ assert.Equal(t, expectedRuleScores[0], score.GetCvssScore(baseRiskScore, secrets.ValidResult), "rule: %s", rule.Rule.RuleID)
+ assert.Equal(t, expectedRuleScores[1], score.GetCvssScore(baseRiskScore, secrets.InvalidResult), "rule: %s", rule.Rule.RuleID)
+ assert.Equal(t, expectedRuleScores[2], score.GetCvssScore(baseRiskScore, secrets.UnknownResult), "rule: %s", rule.Rule.RuleID)
+ }
+
+ // Second pass: build one synthetic secret per rule per validation status.
+ var allSecrets []*secrets.Secret
+ for _, rule := range allRules {
+ var secretValid, secretInvalid, secretUnknown secrets.Secret
+ secretValid.RuleID = rule.Rule.RuleID
+ secretValid.ValidationStatus = secrets.ValidResult
+ secretInvalid.RuleID = rule.Rule.RuleID
+ secretInvalid.ValidationStatus = secrets.InvalidResult
+ secretUnknown.RuleID = rule.Rule.RuleID
+ secretUnknown.ValidationStatus = secrets.UnknownResult
+ allSecrets = append(allSecrets, &secretValid, &secretInvalid, &secretUnknown)
+ }
+ for _, secret := range allSecrets {
+ // NOTE(review): this WaitGroup looks vestigial — engine.Score no longer
+ // takes a *sync.WaitGroup (the &wg argument is removed in this change) and
+ // nothing here calls wg.Done or wg.Wait. Consider deleting wg and, if then
+ // unused, the sync import.
+ var wg sync.WaitGroup
+ wg.Add(2)
+ expectedRuleScores := expectedCvssScores[secret.RuleID]
+ validityIndex := getValidityIndex(secret.ValidationStatus)
+ unknownIndex := getValidityIndex(secrets.UnknownResult)
+ // With validation enabled, the test expects the score for the secret's own status.
+ engine.Score(secret, true)
+ assert.Equal(t, expectedRuleScores[validityIndex], secret.CvssScore, "rule: %s", secret.RuleID)
+ // With validation disabled, the test expects the Unknown-status score.
+ engine.Score(secret, false)
+ assert.Equal(t, expectedRuleScores[unknownIndex], secret.CvssScore, "rule: %s", secret.RuleID)
+ }
+}
+
+// getValidityIndex maps a validation status onto its slot in the expected-score
+// triple: 0 for valid, 1 for invalid, 2 for unknown (and any other value).
+func getValidityIndex(validity secrets.ValidationResult) int {
+ if validity == secrets.ValidResult {
+ return 0
+ }
+ if validity == secrets.InvalidResult {
+ return 1
+ }
+ return 2
+}
diff --git a/engine/validation/alibaba.go b/engine/validation/alibaba.go
index 3a5fb86d..92898e2b 100644
--- a/engine/validation/alibaba.go
+++ b/engine/validation/alibaba.go
@@ -1,90 +1,90 @@
-package validation
-
-import (
- "crypto/hmac"
- "crypto/sha1"
- "encoding/base64"
- "fmt"
- "net/http"
- "net/url"
- "strconv"
- "strings"
- "time"
-
- "github.com/checkmarx/2ms/lib/secrets"
- "github.com/rs/zerolog/log"
-)
-
-// https://www.alibabacloud.com/help/en/sdk/alibaba-cloud-api-overview
-// https://www.alibabacloud.com/help/en/sdk/product-overview/rpc-mechanism#sectiondiv-y9b-x9s-wvp
-
-func validateAlibaba(secretsPairs pairsByRuleId) {
-
- accessKeys := secretsPairs["alibaba-access-key-id"]
- secretKeys := secretsPairs["alibaba-secret-key"]
-
- for _, accessKey := range accessKeys {
- accessKey.ValidationStatus = secrets.UnknownResult
-
- for _, secretKey := range secretKeys {
- status, err := alibabaRequest(accessKey.Value, secretKey.Value)
- if err != nil {
- log.Warn().Err(err).Str("service", "alibaba").Msg("Failed to validate secret")
- }
-
- secretKey.ValidationStatus = status
- if accessKey.ValidationStatus.CompareTo(status) > 0 {
- accessKey.ValidationStatus = status
- }
- }
- }
-}
-
-func alibabaRequest(accessKey, secretKey string) (secrets.ValidationResult, error) {
- req, err := http.NewRequest("GET", "https://ecs.aliyuncs.com/", nil)
- if err != nil {
- return secrets.UnknownResult, err
- }
-
- // Workaround for gitleaks returns the key ends with "
- // https://github.com/gitleaks/gitleaks/pull/1350
- accessKey = strings.TrimSuffix(accessKey, "\"")
- secretKey = strings.TrimSuffix(secretKey, "\"")
-
- params := req.URL.Query()
- params.Add("AccessKeyId", accessKey)
- params.Add("Action", "DescribeRegions")
- params.Add("SignatureMethod", "HMAC-SHA1")
- params.Add("SignatureNonce", strconv.FormatInt(time.Now().UnixNano(), 10))
- params.Add("SignatureVersion", "1.0")
- params.Add("Timestamp", time.Now().UTC().Format(time.RFC3339))
- params.Add("Version", "2014-05-26")
-
- stringToSign := "GET&%2F&" + url.QueryEscape(params.Encode())
- hmac := hmac.New(sha1.New, []byte(secretKey+"&"))
- hmac.Write([]byte(stringToSign))
- signature := base64.StdEncoding.EncodeToString(hmac.Sum(nil))
-
- params.Add("Signature", signature)
- req.URL.RawQuery = params.Encode()
-
- client := &http.Client{}
- resp, err := client.Do(req)
- if err != nil {
- return secrets.UnknownResult, err
- }
- log.Debug().Str("service", "alibaba").Int("status_code", resp.StatusCode)
-
- // If the access key is invalid, the response will be 404
- // If the secret key is invalid, the response will be 400 along with other signautre Errors
- if resp.StatusCode == http.StatusNotFound || resp.StatusCode == http.StatusBadRequest {
- return secrets.InvalidResult, nil
- }
-
- if resp.StatusCode == http.StatusOK {
- return secrets.ValidResult, nil
- }
-
- err = fmt.Errorf("unexpected status code: %d", resp.StatusCode)
- return secrets.UnknownResult, err
-}
+package validation
+
+import (
+ "crypto/hmac"
+ "crypto/sha1"
+ "encoding/base64"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/checkmarx/2ms/lib/secrets"
+ "github.com/rs/zerolog/log"
+)
+
+// https://www.alibabacloud.com/help/en/sdk/alibaba-cloud-api-overview
+// https://www.alibabacloud.com/help/en/sdk/product-overview/rpc-mechanism#sectiondiv-y9b-x9s-wvp
+
+// validateAlibaba validates Alibaba Cloud credentials. The API requires an
+// access-key/secret-key pair, and the true pairing is unknown at scan time, so
+// every detected access key is tried against every detected secret key.
+func validateAlibaba(secretsPairs pairsByRuleId) {
+
+ accessKeys := secretsPairs["alibaba-access-key-id"]
+ secretKeys := secretsPairs["alibaba-secret-key"]
+
+ for _, accessKey := range accessKeys {
+ accessKey.ValidationStatus = secrets.UnknownResult
+
+ for _, secretKey := range secretKeys {
+ status, err := alibabaRequest(accessKey.Value, secretKey.Value)
+ if err != nil {
+ // Best effort: log and fall through with whatever status (Unknown)
+ // alibabaRequest returned alongside the error.
+ log.Warn().Err(err).Str("service", "alibaba").Msg("Failed to validate secret")
+ }
+
+ // NOTE(review): the secret key's status is overwritten on every pairing,
+ // so when several access keys exist, the last access key's result wins.
+ // Confirm this last-writer-wins behavior is intended.
+ secretKey.ValidationStatus = status
+ // Presumably CompareTo orders statuses so that a more conclusive result
+ // replaces the access key's current one — TODO confirm CompareTo semantics.
+ if accessKey.ValidationStatus.CompareTo(status) > 0 {
+ accessKey.ValidationStatus = status
+ }
+ }
+ }
+}
+
+// alibabaRequest checks an access-key/secret-key pair by signing a
+// DescribeRegions call against the ECS endpoint using the Alibaba Cloud RPC
+// HMAC-SHA1 signature mechanism and classifying the HTTP status code.
+// Returns ValidResult on 200, InvalidResult on 404 (bad access key) or 400
+// (bad signature, i.e. bad secret key), and UnknownResult plus an error for
+// anything else.
+func alibabaRequest(accessKey, secretKey string) (secrets.ValidationResult, error) {
+ req, err := http.NewRequest("GET", "https://ecs.aliyuncs.com/", nil)
+ if err != nil {
+ return secrets.UnknownResult, err
+ }
+
+ // Workaround: gitleaks may return keys with a trailing double quote.
+ // https://github.com/gitleaks/gitleaks/pull/1350
+ accessKey = strings.TrimSuffix(accessKey, "\"")
+ secretKey = strings.TrimSuffix(secretKey, "\"")
+
+ params := req.URL.Query()
+ params.Add("AccessKeyId", accessKey)
+ params.Add("Action", "DescribeRegions")
+ params.Add("SignatureMethod", "HMAC-SHA1")
+ params.Add("SignatureNonce", strconv.FormatInt(time.Now().UnixNano(), 10))
+ params.Add("SignatureVersion", "1.0")
+ params.Add("Timestamp", time.Now().UTC().Format(time.RFC3339))
+ params.Add("Version", "2014-05-26")
+
+ // Sign "GET&%2F&<escaped canonical query>" with HMAC-SHA1 keyed by
+ // "<secretKey>&", per the RPC signature spec. ("mac" avoids shadowing the
+ // crypto/hmac package, which the previous local name did.)
+ stringToSign := "GET&%2F&" + url.QueryEscape(params.Encode())
+ mac := hmac.New(sha1.New, []byte(secretKey+"&"))
+ mac.Write([]byte(stringToSign))
+ signature := base64.StdEncoding.EncodeToString(mac.Sum(nil))
+
+ params.Add("Signature", signature)
+ req.URL.RawQuery = params.Encode()
+
+ // Bound the request so a stalled endpoint cannot hang the whole scan.
+ client := &http.Client{Timeout: 10 * time.Second}
+ resp, err := client.Do(req)
+ if err != nil {
+ return secrets.UnknownResult, err
+ }
+ // Close the body so the transport can reuse the connection (previously leaked).
+ defer resp.Body.Close()
+ // zerolog only emits an event when the chain is terminated with Msg/Send;
+ // without it, the previous debug line was silently dropped.
+ log.Debug().Str("service", "alibaba").Int("status_code", resp.StatusCode).Msg("alibaba validation response")
+
+ // If the access key is invalid, the response will be 404.
+ // If the secret key is invalid, the response will be 400 along with other signature errors.
+ if resp.StatusCode == http.StatusNotFound || resp.StatusCode == http.StatusBadRequest {
+ return secrets.InvalidResult, nil
+ }
+
+ if resp.StatusCode == http.StatusOK {
+ return secrets.ValidResult, nil
+ }
+
+ return secrets.UnknownResult, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
+}
diff --git a/engine/validation/client.go b/engine/validation/client.go
index b6577f73..17f3d017 100644
--- a/engine/validation/client.go
+++ b/engine/validation/client.go
@@ -1,21 +1,21 @@
-package validation
-
-import (
- "net/http"
-)
-
-func sendValidationRequest(endpoint string, authorization string) (*http.Response, error) {
- req, err := http.NewRequest("GET", endpoint, nil)
- if err != nil {
- return nil, err
- }
- req.Header.Set("Authorization", authorization)
-
- client := &http.Client{}
- resp, err := client.Do(req)
- if err != nil {
- return nil, err
- }
-
- return resp, nil
-}
+package validation
+
+import (
+ "net/http"
+)
+
+// sendValidationRequest performs a GET against endpoint with the given value in
+// the Authorization header and returns the raw response.
+// The caller owns the response and must close resp.Body.
+// NOTE(review): the client has no timeout, so a stalled endpoint can block a
+// validation worker indefinitely — consider setting http.Client.Timeout.
+func sendValidationRequest(endpoint string, authorization string) (*http.Response, error) {
+ req, err := http.NewRequest("GET", endpoint, nil)
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Set("Authorization", authorization)
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ if err != nil {
+ return nil, err
+ }
+
+ return resp, nil
+}
diff --git a/engine/validation/gcp.go b/engine/validation/gcp.go
index df20762c..0ed95c95 100644
--- a/engine/validation/gcp.go
+++ b/engine/validation/gcp.go
@@ -1,81 +1,81 @@
-package validation
-
-import (
- "encoding/json"
- "io"
- "net/http"
- "strings"
-
- "github.com/checkmarx/2ms/lib/secrets"
- "github.com/rs/zerolog/log"
-)
-
-type errorResponse struct {
- Error struct {
- Message string `json:"message"`
- Details []struct {
- Type string `json:"@type"`
- Metadata struct {
- Consumer string `json:"consumer"`
- } `json:"metadata,omitempty"`
- } `json:"details"`
- } `json:"error"`
-}
-
-func validateGCP(s *secrets.Secret) (secrets.ValidationResult, string) {
- testURL := "https://youtube.googleapis.com/youtube/v3/search?part=snippet&key=" + s.Value
-
- req, err := http.NewRequest("GET", testURL, nil)
- if err != nil {
- log.Warn().Err(err).Msg("Failed to validate secret")
- return secrets.UnknownResult, ""
- }
-
- client := &http.Client{}
- resp, err := client.Do(req)
- if err != nil {
- log.Warn().Err(err).Msg("Failed to validate secret")
- return secrets.UnknownResult, ""
- }
-
- result, extra, err := checkGCPErrorResponse(resp)
- if err != nil {
- log.Warn().Err(err).Msg("Failed to validate secret")
- }
- return result, extra
-}
-
-func checkGCPErrorResponse(resp *http.Response) (secrets.ValidationResult, string, error) {
- if resp.StatusCode == http.StatusOK {
- return secrets.ValidResult, "", nil
- }
-
- if resp.StatusCode != http.StatusForbidden {
- return secrets.InvalidResult, "", nil
- }
-
- bodyBytes, err := io.ReadAll(resp.Body)
- if err != nil {
- return secrets.UnknownResult, "", err
- }
-
- // Unmarshal the response body into the ErrorResponse struct
- var errorResponse errorResponse
- err = json.Unmarshal(bodyBytes, &errorResponse)
- if err != nil {
- return secrets.UnknownResult, "", err
- }
-
- if strings.Contains(errorResponse.Error.Message, "YouTube Data API v3 has not been used in project") {
- extra := ""
- for _, detail := range errorResponse.Error.Details {
- if detail.Type == "type.googleapis.com/google.rpc.ErrorInfo" {
- extra = detail.Metadata.Consumer
- }
- }
- return secrets.ValidResult, extra, nil
- }
-
- return secrets.UnknownResult, "", nil
-
-}
+package validation
+
+import (
+ "encoding/json"
+ "io"
+ "net/http"
+ "strings"
+
+ "github.com/checkmarx/2ms/lib/secrets"
+ "github.com/rs/zerolog/log"
+)
+
+type errorResponse struct {
+ Error struct {
+ Message string `json:"message"`
+ Details []struct {
+ Type string `json:"@type"`
+ Metadata struct {
+ Consumer string `json:"consumer"`
+ } `json:"metadata,omitempty"`
+ } `json:"details"`
+ } `json:"error"`
+}
+
+func validateGCP(s *secrets.Secret) (secrets.ValidationResult, string) {
+ testURL := "https://youtube.googleapis.com/youtube/v3/search?part=snippet&key=" + s.Value
+
+ req, err := http.NewRequest("GET", testURL, nil)
+ if err != nil {
+ log.Warn().Err(err).Msg("Failed to validate secret")
+ return secrets.UnknownResult, ""
+ }
+
+ client := &http.Client{}
+ resp, err := client.Do(req)
+ if err != nil {
+ log.Warn().Err(err).Msg("Failed to validate secret")
+ return secrets.UnknownResult, ""
+ }
+
+ result, extra, err := checkGCPErrorResponse(resp)
+ if err != nil {
+ log.Warn().Err(err).Msg("Failed to validate secret")
+ }
+ return result, extra
+}
+
+func checkGCPErrorResponse(resp *http.Response) (secrets.ValidationResult, string, error) {
+ if resp.StatusCode == http.StatusOK {
+ return secrets.ValidResult, "", nil
+ }
+
+ if resp.StatusCode != http.StatusForbidden {
+ return secrets.InvalidResult, "", nil
+ }
+
+ bodyBytes, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return secrets.UnknownResult, "", err
+ }
+
+ // Unmarshal the response body into the errorResponse struct
+ var errorResponse errorResponse
+ err = json.Unmarshal(bodyBytes, &errorResponse)
+ if err != nil {
+ return secrets.UnknownResult, "", err
+ }
+
+ if strings.Contains(errorResponse.Error.Message, "YouTube Data API v3 has not been used in project") {
+ extra := ""
+ for _, detail := range errorResponse.Error.Details {
+ if detail.Type == "type.googleapis.com/google.rpc.ErrorInfo" {
+ extra = detail.Metadata.Consumer
+ }
+ }
+ return secrets.ValidResult, extra, nil
+ }
+
+ return secrets.UnknownResult, "", nil
+
+}
diff --git a/engine/validation/github.go b/engine/validation/github.go
index 643ee819..4885b5b8 100644
--- a/engine/validation/github.go
+++ b/engine/validation/github.go
@@ -1,25 +1,25 @@
-package validation
-
-import (
- "fmt"
- "net/http"
-
- "github.com/checkmarx/2ms/lib/secrets"
- "github.com/rs/zerolog/log"
-)
-
-func validateGithub(s *secrets.Secret) (secrets.ValidationResult, string) {
- const githubURL = "https://api.github.com/"
-
- resp, err := sendValidationRequest(githubURL, fmt.Sprintf("token %s", s.Value))
-
- if err != nil {
- log.Warn().Err(err).Msg("Failed to validate secret")
- return secrets.UnknownResult, ""
- }
-
- if resp.StatusCode == http.StatusOK {
- return secrets.ValidResult, ""
- }
- return secrets.InvalidResult, ""
-}
+package validation
+
+import (
+ "fmt"
+ "net/http"
+
+ "github.com/checkmarx/2ms/lib/secrets"
+ "github.com/rs/zerolog/log"
+)
+
+func validateGithub(s *secrets.Secret) (secrets.ValidationResult, string) {
+ const githubURL = "https://api.github.com/"
+
+ resp, err := sendValidationRequest(githubURL, fmt.Sprintf("token %s", s.Value))
+
+ if err != nil {
+ log.Warn().Err(err).Msg("Failed to validate secret")
+ return secrets.UnknownResult, ""
+ }
+
+ if resp.StatusCode == http.StatusOK {
+ return secrets.ValidResult, ""
+ }
+ return secrets.InvalidResult, ""
+}
diff --git a/engine/validation/gitlab.go b/engine/validation/gitlab.go
index 4cd635e8..d6c9bf0b 100644
--- a/engine/validation/gitlab.go
+++ b/engine/validation/gitlab.go
@@ -1,43 +1,43 @@
-package validation
-
-import (
- "encoding/json"
- "fmt"
- "io"
- "net/http"
-
- "github.com/checkmarx/2ms/lib/secrets"
- "github.com/rs/zerolog/log"
-)
-
-type userResponse struct {
- WebURL string `json:"web_url"`
-}
-
-func validateGitlab(s *secrets.Secret) (secrets.ValidationResult, string) {
- const gitlabURL = "https://gitlab.com/api/v4/user"
-
- resp, err := sendValidationRequest(gitlabURL, fmt.Sprintf("Bearer %s", s.Value))
-
- if err != nil {
- log.Warn().Err(err).Msg("Failed to validate secret")
- return secrets.UnknownResult, ""
- }
-
- if resp.StatusCode == http.StatusOK {
- bodyBytes, err := io.ReadAll(resp.Body)
- if err != nil {
- log.Warn().Err(err).Msg("Failed to read response body for Gitlab validation")
- return secrets.ValidResult, ""
- }
-
- var user userResponse
- if err := json.Unmarshal(bodyBytes, &user); err != nil {
- log.Warn().Err(err).Msg("Failed to unmarshal response body for Gitlab validation")
- return secrets.ValidResult, ""
- }
-
- return secrets.ValidResult, user.WebURL
- }
- return secrets.InvalidResult, ""
-}
+package validation
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+
+ "github.com/checkmarx/2ms/lib/secrets"
+ "github.com/rs/zerolog/log"
+)
+
+type userResponse struct {
+ WebURL string `json:"web_url"`
+}
+
+func validateGitlab(s *secrets.Secret) (secrets.ValidationResult, string) {
+ const gitlabURL = "https://gitlab.com/api/v4/user"
+
+ resp, err := sendValidationRequest(gitlabURL, fmt.Sprintf("Bearer %s", s.Value))
+
+ if err != nil {
+ log.Warn().Err(err).Msg("Failed to validate secret")
+ return secrets.UnknownResult, ""
+ }
+
+ if resp.StatusCode == http.StatusOK {
+ bodyBytes, err := io.ReadAll(resp.Body)
+ if err != nil {
+ log.Warn().Err(err).Msg("Failed to read response body for Gitlab validation")
+ return secrets.ValidResult, ""
+ }
+
+ var user userResponse
+ if err := json.Unmarshal(bodyBytes, &user); err != nil {
+ log.Warn().Err(err).Msg("Failed to unmarshal response body for Gitlab validation")
+ return secrets.ValidResult, ""
+ }
+
+ return secrets.ValidResult, user.WebURL
+ }
+ return secrets.InvalidResult, ""
+}
diff --git a/engine/validation/pairs.go b/engine/validation/pairs.go
index 68ed1e0d..e87ef38e 100644
--- a/engine/validation/pairs.go
+++ b/engine/validation/pairs.go
@@ -1,66 +1,63 @@
-package validation
-
-import (
- "sync"
-
- "github.com/checkmarx/2ms/lib/secrets"
-)
-
-type pairsByRuleId map[string][]*secrets.Secret
-type pairsBySource map[string]pairsByRuleId
-type pairsByGeneralKey map[string]pairsBySource
-
-type pairsCollector struct {
- pairs pairsByGeneralKey
-}
-
-func newPairsCollector() *pairsCollector {
- return &pairsCollector{pairs: make(pairsByGeneralKey)}
-}
-
-func (p *pairsCollector) addIfNeeded(secret *secrets.Secret) bool {
- generalKey, ok := ruleToGeneralKey[secret.RuleID]
- if !ok {
- return false
- }
-
- if _, ok := p.pairs[generalKey]; !ok {
- p.pairs[generalKey] = make(pairsBySource)
- }
- if _, ok := p.pairs[generalKey][secret.Source]; !ok {
- p.pairs[generalKey][secret.Source] = make(pairsByRuleId)
- }
- if _, ok := p.pairs[generalKey][secret.Source][secret.RuleID]; !ok {
- p.pairs[generalKey][secret.Source][secret.RuleID] = make([]*secrets.Secret, 0)
- }
-
- p.pairs[generalKey][secret.Source][secret.RuleID] = append(p.pairs[generalKey][secret.Source][secret.RuleID], secret)
- return true
-}
-
-func (p *pairsCollector) validate(generalKey string, rulesById pairsByRuleId, wg *sync.WaitGroup) {
- defer wg.Done()
- generalKeyToValidation[generalKey](rulesById)
-}
-
-type pairsValidationFunc func(pairsByRuleId)
-
-var generalKeyToValidation = map[string]pairsValidationFunc{
- "alibaba": validateAlibaba,
-}
-
-var generalKeyToRules = map[string][]string{
- "alibaba": {"alibaba-access-key-id", "alibaba-secret-key"},
-}
-
-func generateRuleToGeneralKey() map[string]string {
- ruleToGeneralKey := make(map[string]string)
- for key, rules := range generalKeyToRules {
- for _, rule := range rules {
- ruleToGeneralKey[rule] = key
- }
- }
- return ruleToGeneralKey
-}
-
-var ruleToGeneralKey = generateRuleToGeneralKey()
+package validation
+
+import (
+ "github.com/checkmarx/2ms/lib/secrets"
+)
+
+type pairsByRuleId map[string][]*secrets.Secret
+type pairsBySource map[string]pairsByRuleId
+type pairsByGeneralKey map[string]pairsBySource
+
+type pairsCollector struct {
+ pairs pairsByGeneralKey
+}
+
+func newPairsCollector() *pairsCollector {
+ return &pairsCollector{pairs: make(pairsByGeneralKey)}
+}
+
+func (p *pairsCollector) addIfNeeded(secret *secrets.Secret) bool {
+ generalKey, ok := ruleToGeneralKey[secret.RuleID]
+ if !ok {
+ return false
+ }
+
+ if _, ok := p.pairs[generalKey]; !ok {
+ p.pairs[generalKey] = make(pairsBySource)
+ }
+ if _, ok := p.pairs[generalKey][secret.Source]; !ok {
+ p.pairs[generalKey][secret.Source] = make(pairsByRuleId)
+ }
+ if _, ok := p.pairs[generalKey][secret.Source][secret.RuleID]; !ok {
+ p.pairs[generalKey][secret.Source][secret.RuleID] = make([]*secrets.Secret, 0)
+ }
+
+ p.pairs[generalKey][secret.Source][secret.RuleID] = append(p.pairs[generalKey][secret.Source][secret.RuleID], secret)
+ return true
+}
+
+func (p *pairsCollector) validate(generalKey string, rulesById pairsByRuleId) {
+ generalKeyToValidation[generalKey](rulesById)
+}
+
+type pairsValidationFunc func(pairsByRuleId)
+
+var generalKeyToValidation = map[string]pairsValidationFunc{
+ "alibaba": validateAlibaba,
+}
+
+var generalKeyToRules = map[string][]string{
+ "alibaba": {"alibaba-access-key-id", "alibaba-secret-key"},
+}
+
+func generateRuleToGeneralKey() map[string]string {
+ ruleToGeneralKey := make(map[string]string)
+ for key, rules := range generalKeyToRules {
+ for _, rule := range rules {
+ ruleToGeneralKey[rule] = key
+ }
+ }
+ return ruleToGeneralKey
+}
+
+var ruleToGeneralKey = generateRuleToGeneralKey()
diff --git a/engine/validation/validator.go b/engine/validation/validator.go
index ae8e8a18..6a44289b 100644
--- a/engine/validation/validator.go
+++ b/engine/validation/validator.go
@@ -1,69 +1,64 @@
-package validation
-
-import (
- "sync"
-
- "github.com/checkmarx/2ms/engine/extra"
- "github.com/checkmarx/2ms/lib/secrets"
-)
-
-type validationFunc = func(*secrets.Secret) (secrets.ValidationResult, string)
-
-var ruleIDToFunction = map[string]validationFunc{
- "github-fine-grained-pat": validateGithub,
- "github-pat": validateGithub,
- "gitlab-pat": validateGitlab,
- "gcp-api-key": validateGCP,
-}
-
-type Validator struct {
- pairsCollector *pairsCollector
-}
-
-func NewValidator() *Validator {
- return &Validator{pairsCollector: newPairsCollector()}
-}
-
-func (v *Validator) RegisterForValidation(secret *secrets.Secret) {
- if validate, ok := ruleIDToFunction[secret.RuleID]; ok {
- status, extra := validate(secret)
- secret.ValidationStatus = status
- addExtraToSecret(secret, extra)
- } else if !v.pairsCollector.addIfNeeded(secret) {
- secret.ValidationStatus = secrets.UnknownResult
- }
-}
-
-func (v *Validator) Validate() {
- wg := &sync.WaitGroup{}
- for generalKey, bySource := range v.pairsCollector.pairs {
- for _, byRule := range bySource {
- wg.Add(1)
- v.pairsCollector.validate(generalKey, byRule, wg)
- }
- }
- wg.Wait()
-}
-
-func IsCanValidateRule(ruleID string) bool {
- if _, ok := ruleIDToFunction[ruleID]; ok {
- return true
- }
- if _, ok := ruleToGeneralKey[ruleID]; ok {
- return true
- }
-
- return false
-}
-
-func addExtraToSecret(secret *secrets.Secret, extraData string) {
- if extraData == "" {
- return
- }
-
- if secret.ExtraDetails == nil {
- secret.ExtraDetails = make(map[string]interface{})
- }
-
- extra.UpdateExtraField(secret, "validationDetails", extraData)
-}
+package validation
+
+import (
+ "github.com/checkmarx/2ms/engine/extra"
+ "github.com/checkmarx/2ms/lib/secrets"
+)
+
+type validationFunc = func(*secrets.Secret) (secrets.ValidationResult, string)
+
+var ruleIDToFunction = map[string]validationFunc{
+ "github-fine-grained-pat": validateGithub,
+ "github-pat": validateGithub,
+ "gitlab-pat": validateGitlab,
+ "gcp-api-key": validateGCP,
+}
+
+type Validator struct {
+ pairsCollector *pairsCollector
+}
+
+func NewValidator() *Validator {
+ return &Validator{pairsCollector: newPairsCollector()}
+}
+
+func (v *Validator) RegisterForValidation(secret *secrets.Secret) {
+ if validate, ok := ruleIDToFunction[secret.RuleID]; ok {
+ status, extra := validate(secret)
+ secret.ValidationStatus = status
+ addExtraToSecret(secret, extra)
+ } else if !v.pairsCollector.addIfNeeded(secret) {
+ secret.ValidationStatus = secrets.UnknownResult
+ }
+}
+
+func (v *Validator) Validate() {
+ for generalKey, bySource := range v.pairsCollector.pairs {
+ for _, byRule := range bySource {
+ v.pairsCollector.validate(generalKey, byRule)
+ }
+ }
+}
+
+func IsCanValidateRule(ruleID string) bool {
+ if _, ok := ruleIDToFunction[ruleID]; ok {
+ return true
+ }
+ if _, ok := ruleToGeneralKey[ruleID]; ok {
+ return true
+ }
+
+ return false
+}
+
+func addExtraToSecret(secret *secrets.Secret, extraData string) {
+ if extraData == "" {
+ return
+ }
+
+ if secret.ExtraDetails == nil {
+ secret.ExtraDetails = make(map[string]interface{})
+ }
+
+ extra.UpdateExtraField(secret, "validationDetails", extraData)
+}
diff --git a/go.mod b/go.mod
index 003a49b1..13cbb14d 100644
--- a/go.mod
+++ b/go.mod
@@ -12,6 +12,7 @@ require (
github.com/spf13/viper v1.18.2-0.20240419203757-d539b7a2462e
github.com/stretchr/testify v1.9.0
github.com/zricethezav/gitleaks/v8 v8.18.2
+ golang.org/x/sync v0.10.0
golang.org/x/time v0.5.0
gopkg.in/yaml.v3 v3.0.1
)
@@ -48,7 +49,6 @@ require (
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/crypto v0.32.0 // indirect
golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f // indirect
- golang.org/x/sync v0.10.0 // indirect
golang.org/x/sys v0.29.0 // indirect
golang.org/x/text v0.21.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
diff --git a/ignore.openvex b/ignore.openvex
index c65473cc..c7048009 100644
--- a/ignore.openvex
+++ b/ignore.openvex
@@ -1,9 +1,9 @@
-{
- "@context": "https://openvex.dev/ns/v0.2.0",
- "@id": "https://openvex.dev/docs/public/vex-d906fd067a1c6d14702845312a6bddd365153fe7c9eb651e6d7b89dad2bc9d22",
- "author": "Monica Casanova",
- "timestamp": "2024-04-11T16:02:39.0223474+01:00",
- "version": 1,
- "statements": [
- ]
+{
+ "@context": "https://openvex.dev/ns/v0.2.0",
+ "@id": "https://openvex.dev/docs/public/vex-d906fd067a1c6d14702845312a6bddd365153fe7c9eb651e6d7b89dad2bc9d22",
+ "author": "Monica Casanova",
+ "timestamp": "2024-04-11T16:02:39.0223474+01:00",
+ "version": 1,
+ "statements": [
+ ]
}
\ No newline at end of file
diff --git a/lib/config/config.go b/lib/config/config.go
index bebe909b..da3c9a43 100644
--- a/lib/config/config.go
+++ b/lib/config/config.go
@@ -1,10 +1,10 @@
-package config
-
-type Config struct {
- Name string
- Version string
-}
-
-func LoadConfig(name string, version string) *Config {
- return &Config{Name: name, Version: version}
-}
+package config
+
+type Config struct {
+ Name string
+ Version string
+}
+
+func LoadConfig(name string, version string) *Config {
+ return &Config{Name: name, Version: version}
+}
diff --git a/lib/secrets/secret.go b/lib/secrets/secret.go
index 5d505518..9f4db96b 100644
--- a/lib/secrets/secret.go
+++ b/lib/secrets/secret.go
@@ -1,49 +1,49 @@
-package secrets
-
-type ValidationResult string
-
-const (
- ValidResult ValidationResult = "Valid"
- InvalidResult ValidationResult = "Invalid"
- UnknownResult ValidationResult = "Unknown"
-)
-
-type compared int
-
-const (
- first compared = -1
- second compared = 1
- equal compared = 0
-)
-
-func (v ValidationResult) CompareTo(other ValidationResult) compared {
- if v == other {
- return equal
- }
- if v == UnknownResult {
- return second
- }
- if other == UnknownResult {
- return first
- }
- if v == InvalidResult {
- return second
- }
- return first
-}
-
-type Secret struct {
- ID string `json:"id"`
- Source string `json:"source"`
- RuleID string `json:"ruleId"`
- StartLine int `json:"startLine"`
- EndLine int `json:"endLine"`
- LineContent string `json:"lineContent"`
- StartColumn int `json:"startColumn"`
- EndColumn int `json:"endColumn"`
- Value string `json:"value"`
- ValidationStatus ValidationResult `json:"validationStatus,omitempty"`
- RuleDescription string `json:"ruleDescription,omitempty"`
- ExtraDetails map[string]interface{} `json:"extraDetails,omitempty"`
- CvssScore float64 `json:"cvssScore,omitempty"`
-}
+package secrets
+
+type ValidationResult string
+
+const (
+ ValidResult ValidationResult = "Valid"
+ InvalidResult ValidationResult = "Invalid"
+ UnknownResult ValidationResult = "Unknown"
+)
+
+type compared int
+
+const (
+ first compared = -1
+ second compared = 1
+ equal compared = 0
+)
+
+func (v ValidationResult) CompareTo(other ValidationResult) compared {
+ if v == other {
+ return equal
+ }
+ if v == UnknownResult {
+ return second
+ }
+ if other == UnknownResult {
+ return first
+ }
+ if v == InvalidResult {
+ return second
+ }
+ return first
+}
+
+type Secret struct {
+ ID string `json:"id"`
+ Source string `json:"source"`
+ RuleID string `json:"ruleId"`
+ StartLine int `json:"startLine"`
+ EndLine int `json:"endLine"`
+ LineContent string `json:"lineContent"`
+ StartColumn int `json:"startColumn"`
+ EndColumn int `json:"endColumn"`
+ Value string `json:"value"`
+ ValidationStatus ValidationResult `json:"validationStatus,omitempty"`
+ RuleDescription string `json:"ruleDescription,omitempty"`
+ ExtraDetails map[string]interface{} `json:"extraDetails,omitempty"`
+ CvssScore float64 `json:"cvssScore,omitempty"`
+}
diff --git a/lib/secrets/secret_test.go b/lib/secrets/secret_test.go
index e88fb7e2..64bf46b2 100644
--- a/lib/secrets/secret_test.go
+++ b/lib/secrets/secret_test.go
@@ -1,49 +1,49 @@
-package secrets
-
-import (
- "testing"
-)
-
-func TestValidationResultCompareTo(t *testing.T) {
- testCases := []struct {
- first ValidationResult
- second ValidationResult
- want compared
- message string
- }{
- {
- first: ValidResult,
- second: ValidResult,
- want: equal,
- message: "Valid should be equal to Valid",
- },
- {
- first: InvalidResult,
- second: ValidResult,
- want: second,
- message: "Valid should be greater than Invalid",
- },
- {
- first: ValidResult,
- second: UnknownResult,
- want: first,
- message: "Valid should be greater than Unknown",
- },
- {
- first: UnknownResult,
- second: InvalidResult,
- want: second,
- message: "Invalid should be greater than Unknown",
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.message, func(t *testing.T) {
- got := tc.first.CompareTo(tc.second)
- if got != tc.want {
- t.Errorf("got %d, want %d", got, tc.want)
- }
- },
- )
- }
-}
+package secrets
+
+import (
+ "testing"
+)
+
+func TestValidationResultCompareTo(t *testing.T) {
+ testCases := []struct {
+ first ValidationResult
+ second ValidationResult
+ want compared
+ message string
+ }{
+ {
+ first: ValidResult,
+ second: ValidResult,
+ want: equal,
+ message: "Valid should be equal to Valid",
+ },
+ {
+ first: InvalidResult,
+ second: ValidResult,
+ want: second,
+ message: "Valid should be greater than Invalid",
+ },
+ {
+ first: ValidResult,
+ second: UnknownResult,
+ want: first,
+ message: "Valid should be greater than Unknown",
+ },
+ {
+ first: UnknownResult,
+ second: InvalidResult,
+ want: second,
+ message: "Invalid should be greater than Unknown",
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.message, func(t *testing.T) {
+ got := tc.first.CompareTo(tc.second)
+ if got != tc.want {
+ t.Errorf("got %d, want %d", got, tc.want)
+ }
+ },
+ )
+ }
+}
diff --git a/lib/utils/channels.go b/lib/utils/channels.go
index 33701b3e..3a3dd2e3 100644
--- a/lib/utils/channels.go
+++ b/lib/utils/channels.go
@@ -1,12 +1,12 @@
-package utils
-
-import "sync"
-
-func BindChannels[T any](source <-chan T, dest chan<- T, wg *sync.WaitGroup) {
- if wg != nil {
- defer wg.Done()
- }
- for item := range source {
- dest <- item
- }
-}
+package utils
+
+import "sync"
+
+func BindChannels[T any](source <-chan T, dest chan<- T, wg *sync.WaitGroup) {
+ if wg != nil {
+ defer wg.Done()
+ }
+ for item := range source {
+ dest <- item
+ }
+}
diff --git a/lib/utils/flags.go b/lib/utils/flags.go
index 1a0a5ad0..4cbca2a0 100644
--- a/lib/utils/flags.go
+++ b/lib/utils/flags.go
@@ -1,98 +1,98 @@
-package utils
-
-import (
- "fmt"
- "path/filepath"
- "strings"
-
- "github.com/rs/zerolog/log"
- "github.com/spf13/cobra"
- "github.com/spf13/pflag"
- "github.com/spf13/viper"
-)
-
-func LoadConfig(v *viper.Viper, configFilePath string) error {
- if configFilePath == "" {
- return nil
- }
-
- configType := strings.TrimPrefix(filepath.Ext(configFilePath), ".")
-
- v.SetConfigType(configType)
- v.SetConfigFile(configFilePath)
- return v.ReadInConfig()
-}
-
-// TODO: can be a package
-
-// BindFlags fill flags values with config file or environment variables data
-func BindFlags(cmd *cobra.Command, v *viper.Viper, envPrefix string) error {
- commandHierarchy := getCommandHierarchy(cmd)
-
- bindFlag := func(f *pflag.Flag) {
- fullFlagName := fmt.Sprintf("%s%s", commandHierarchy, f.Name)
- bindEnvVarIntoViper(v, fullFlagName, envPrefix)
-
- if f.Changed {
- return
- }
-
- if v.IsSet(fullFlagName) {
- val := v.Get(fullFlagName)
- applyViperFlagToCommand(f, val, cmd)
- }
- }
- cmd.PersistentFlags().VisitAll(bindFlag)
- cmd.Flags().VisitAll(bindFlag)
-
- for _, subCmd := range cmd.Commands() {
- if err := BindFlags(subCmd, v, envPrefix); err != nil {
- return err
- }
- }
-
- return nil
-}
-
-func bindEnvVarIntoViper(v *viper.Viper, fullFlagName, envPrefix string) {
- envVarSuffix := strings.ToUpper(strings.ReplaceAll(strings.ReplaceAll(fullFlagName, "-", "_"), ".", "_"))
- envVarName := fmt.Sprintf("%s_%s", envPrefix, envVarSuffix)
-
- if err := v.BindEnv(fullFlagName, envVarName, strings.ToLower(envVarName)); err != nil {
- log.Err(err).Msg("Failed to bind Viper flags")
- }
-}
-
-func applyViperFlagToCommand(flag *pflag.Flag, val interface{}, cmd *cobra.Command) {
- switch t := val.(type) {
- case []interface{}:
- for _, param := range t {
- if err := flag.Value.Set(param.(string)); err != nil {
- log.Err(err).Msg("Failed to set Viper flags")
- }
- }
- default:
- newVal := fmt.Sprintf("%v", val)
- if err := flag.Value.Set(newVal); err != nil {
- log.Err(err).Msg("Failed to set Viper flags")
- }
- }
- flag.Changed = true
-}
-
-func getCommandHierarchy(cmd *cobra.Command) string {
- names := []string{}
- if !cmd.HasParent() {
- return ""
- }
-
- for parent := cmd; parent.HasParent() && parent.Name() != ""; parent = parent.Parent() {
- names = append([]string{parent.Name()}, names...)
- }
-
- if len(names) == 0 {
- return ""
- }
-
- return strings.Join(names, ".") + "."
-}
+package utils
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+
+ "github.com/rs/zerolog/log"
+ "github.com/spf13/cobra"
+ "github.com/spf13/pflag"
+ "github.com/spf13/viper"
+)
+
+func LoadConfig(v *viper.Viper, configFilePath string) error {
+ if configFilePath == "" {
+ return nil
+ }
+
+ configType := strings.TrimPrefix(filepath.Ext(configFilePath), ".")
+
+ v.SetConfigType(configType)
+ v.SetConfigFile(configFilePath)
+ return v.ReadInConfig()
+}
+
+// TODO: can be a package
+
+// BindFlags fill flags values with config file or environment variables data
+func BindFlags(cmd *cobra.Command, v *viper.Viper, envPrefix string) error {
+ commandHierarchy := getCommandHierarchy(cmd)
+
+ bindFlag := func(f *pflag.Flag) {
+ fullFlagName := fmt.Sprintf("%s%s", commandHierarchy, f.Name)
+ bindEnvVarIntoViper(v, fullFlagName, envPrefix)
+
+ if f.Changed {
+ return
+ }
+
+ if v.IsSet(fullFlagName) {
+ val := v.Get(fullFlagName)
+ applyViperFlagToCommand(f, val, cmd)
+ }
+ }
+ cmd.PersistentFlags().VisitAll(bindFlag)
+ cmd.Flags().VisitAll(bindFlag)
+
+ for _, subCmd := range cmd.Commands() {
+ if err := BindFlags(subCmd, v, envPrefix); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func bindEnvVarIntoViper(v *viper.Viper, fullFlagName, envPrefix string) {
+ envVarSuffix := strings.ToUpper(strings.ReplaceAll(strings.ReplaceAll(fullFlagName, "-", "_"), ".", "_"))
+ envVarName := fmt.Sprintf("%s_%s", envPrefix, envVarSuffix)
+
+ if err := v.BindEnv(fullFlagName, envVarName, strings.ToLower(envVarName)); err != nil {
+ log.Err(err).Msg("Failed to bind Viper flags")
+ }
+}
+
+func applyViperFlagToCommand(flag *pflag.Flag, val interface{}, cmd *cobra.Command) {
+ switch t := val.(type) {
+ case []interface{}:
+ for _, param := range t {
+ if err := flag.Value.Set(param.(string)); err != nil {
+ log.Err(err).Msg("Failed to set Viper flags")
+ }
+ }
+ default:
+ newVal := fmt.Sprintf("%v", val)
+ if err := flag.Value.Set(newVal); err != nil {
+ log.Err(err).Msg("Failed to set Viper flags")
+ }
+ }
+ flag.Changed = true
+}
+
+func getCommandHierarchy(cmd *cobra.Command) string {
+ names := []string{}
+ if !cmd.HasParent() {
+ return ""
+ }
+
+ for parent := cmd; parent.HasParent() && parent.Name() != ""; parent = parent.Parent() {
+ names = append([]string{parent.Name()}, names...)
+ }
+
+ if len(names) == 0 {
+ return ""
+ }
+
+ return strings.Join(names, ".") + "."
+}
diff --git a/lib/utils/flags_test.go b/lib/utils/flags_test.go
index 5efd71e2..7d18b631 100644
--- a/lib/utils/flags_test.go
+++ b/lib/utils/flags_test.go
@@ -1,763 +1,763 @@
-package utils_test
-
-import (
- "bytes"
- "os"
- "strings"
- "testing"
-
- "github.com/checkmarx/2ms/lib/utils"
- "github.com/spf13/cobra"
- "github.com/spf13/viper"
- "github.com/stretchr/testify/assert"
-)
-
-const envVarPrefix = "PREFIX"
-
-func TestBindFlags(t *testing.T) {
- t.Run("BindFlags_TestEmptyViper", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- cmd := &cobra.Command{}
- v := getViper()
-
- var (
- testString string
- testInt int
- testBool bool
- testFloat64 float64
- )
-
- cmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
- cmd.PersistentFlags().IntVar(&testInt, "test-int", 0, "Test int flag")
- cmd.PersistentFlags().BoolVar(&testBool, "test-bool", false, "Test bool flag")
- cmd.PersistentFlags().Float64Var(&testFloat64, "test-float64", 0.0, "Test float64 flag")
-
- err := utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Empty(t, testString)
- assert.Empty(t, testInt)
- assert.Empty(t, testBool)
- assert.Empty(t, testFloat64)
- })
-
- t.Run("BindFlags_FromEnvVarsToCobraCommand", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- cmd := &cobra.Command{}
- v := getViper()
- v.SetEnvPrefix(envVarPrefix)
-
- var (
- testString string
- testInt int
- testBool bool
- testFloat64 float64
- )
-
- cmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
- cmd.PersistentFlags().IntVar(&testInt, "test-int", 0, "Test int flag")
- cmd.PersistentFlags().BoolVar(&testBool, "test-bool", false, "Test bool flag")
- cmd.PersistentFlags().Float64Var(&testFloat64, "test-float64", 0.0, "Test float64 flag")
-
- err := setEnv("PREFIX_TEST_STRING", "test-string-value")
- assert.NoError(t, err)
- err = setEnv("PREFIX_TEST_INT", "456")
- assert.NoError(t, err)
- err = setEnv("PREFIX_TEST_BOOL", "true")
- assert.NoError(t, err)
- err = setEnv("PREFIX_TEST_FLOAT64", "1.23")
- assert.NoError(t, err)
-
- err = utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "test-string-value", testString)
- assert.Equal(t, 456, testInt)
- assert.Equal(t, true, testBool)
- assert.Equal(t, 1.23, testFloat64)
- })
-
- t.Run("BindFlags_NonPersistentFlags", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- cmd := &cobra.Command{}
- v := getViper()
-
- var (
- testString string
- )
-
- cmd.Flags().StringVar(&testString, "test-string", "", "Test string flag")
-
- err := setEnv("PREFIX_TEST_STRING", "test-string-value")
- assert.NoError(t, err)
-
- err = utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "test-string-value", testString)
- })
-
- t.Run("BindFlags_Subcommand", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- var (
- testString string
- testInt int
- )
-
- subCommand := &cobra.Command{
- Use: "subCommand",
- }
- subCommand.Flags().StringVar(&testString, "test-string", "", "Test string flag")
- subCommand.PersistentFlags().IntVar(&testInt, "test-int", 0, "Test int flag")
-
- cmd := &cobra.Command{}
- cmd.AddCommand(subCommand)
- v := getViper()
-
- err := setEnv("PREFIX_SUBCOMMAND_TEST_STRING", "test-string-value")
- assert.NoError(t, err)
- err = setEnv("PREFIX_SUBCOMMAND_TEST_INT", "456")
- assert.NoError(t, err)
-
- err = utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "test-string-value", testString)
- assert.Equal(t, 456, testInt)
- })
-
- t.Run("BindFlags_ArrayFlag", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- arr := []string{"test", "array", "flag"}
-
- cmd := &cobra.Command{}
- v := getViper()
-
- var (
- // testArraySpaces []string
- testArrayCommas []string
- )
-
- // cmd.PersistentFlags().StringSliceVar(&testArraySpaces, "test-array-spaces", []string{}, "Test array flag")
- cmd.PersistentFlags().StringSliceVar(&testArrayCommas, "test-array-commas", []string{}, "Test array flag")
-
- // err := setEnv("PREFIX_TEST_ARRAY_SPACES", strings.Join(arr, " "))
- // assert.NoError(t, err)
- err := setEnv("PREFIX_TEST_ARRAY_COMMAS", strings.Join(arr, ","))
- assert.NoError(t, err)
-
- err = utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- // assert.Equal(t, testArraySpaces, arr)
- assert.Equal(t, arr, testArrayCommas)
- })
-
- t.Run("BindFlags_ReturnsErrorForUnknownConfigurationKeys", func(t *testing.T) {
- t.Skip("Not sure if we need this feature.")
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- cmd := &cobra.Command{}
- v := getViper()
-
- var (
- testString string
- )
-
- cmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
-
- v.Set("unknown-key", "unknown-value")
-
- err := utils.BindFlags(cmd, v, envVarPrefix)
-
- assert.EqualError(t, err, "unknown configuration key: 'unknown-key'\nShowing help for '' command")
- })
-
- t.Run("BindFlags_LowerCaseEnvVars", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- cmd := &cobra.Command{}
- v := getViper()
-
- var (
- testString string
- )
-
- cmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
-
- err := setEnv("prefix_test_string", "test-string-value")
- assert.NoError(t, err)
-
- err = utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "test-string-value", testString)
- })
-
- t.Run("BindFlags_OneWordFlagName", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- cmd := &cobra.Command{}
- v := getViper()
-
- var (
- testString string
- )
-
- cmd.Flags().StringVar(&testString, "teststring", "", "Test string flag")
-
- err := setEnv("prefix_teststring", "test-string-value")
- assert.NoError(t, err)
-
- err = utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "test-string-value", testString)
- })
-
- t.Run("BindFlags_SameFlagNameDifferentCmd", func(t *testing.T) {
-
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- rootCmd := &cobra.Command{
- Use: "root",
- }
- cmd1 := &cobra.Command{
- Use: "cmd1",
- }
- cmd2 := &cobra.Command{
- Use: "cmd2",
- }
- v := getViper()
-
- var (
- testStringRoot string
- testStringPersistentRoot string
- testString1 string
- testStringPersistent1 string
- testString2 string
- testStringPersistent2 string
- )
-
- rootCmd.Flags().StringVar(&testStringRoot, "test-string", "", "Test string flag")
- rootCmd.PersistentFlags().StringVar(&testStringPersistentRoot, "test-string-persistent", "", "Test string flag")
- cmd1.Flags().StringVar(&testString1, "test-string", "", "Test string flag")
- cmd1.PersistentFlags().StringVar(&testStringPersistent1, "test-string-persistent", "", "Test string flag")
- cmd2.Flags().StringVar(&testString2, "test-string", "", "Test string flag")
- cmd2.PersistentFlags().StringVar(&testStringPersistent2, "test-string-persistent", "", "Test string flag")
-
- rootCmd.AddCommand(cmd1)
- rootCmd.AddCommand(cmd2)
-
- err := setEnv("prefix_test_string", "test-string-value")
- assert.NoError(t, err)
- err = setEnv("prefix_test_string_persistent", "test-string-persistent-value")
- assert.NoError(t, err)
- err = setEnv("prefix_cmd1_test_string", "test-string-value-cmd1")
- assert.NoError(t, err)
- err = setEnv("prefix_cmd1_test_string_persistent", "test-string-persistent-value-cmd1")
- assert.NoError(t, err)
- err = setEnv("prefix_cmd2_test_string", "test-string-value-cmd2")
- assert.NoError(t, err)
- err = setEnv("prefix_cmd2_test_string_persistent", "test-string-persistent-value-cmd2")
- assert.NoError(t, err)
-
- err = utils.BindFlags(rootCmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "test-string-value", testStringRoot)
- assert.Equal(t, "test-string-persistent-value", testStringPersistentRoot)
- assert.Equal(t, "test-string-value-cmd1", testString1)
- assert.Equal(t, "test-string-persistent-value-cmd1", testStringPersistent1)
- assert.Equal(t, "test-string-value-cmd2", testString2)
- assert.Equal(t, "test-string-persistent-value-cmd2", testStringPersistent2)
- })
-
- t.Run("BindFlags_FromYAML_RootCMD", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- yamlConfig := []byte(`
-test-string: test-string-value
-test-int: 123
-test-bool: true
-test-array:
- - test
- - array
- - flag
-test-float: 123.456
-`)
-
- cmd := &cobra.Command{}
- v := getViper()
- v.SetConfigType("yaml")
- assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
-
- var (
- testString string
- testInt int
- testBool bool
- testArray []string
- testFloat float64
- )
-
- cmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
- cmd.Flags().IntVar(&testInt, "test-int", 0, "Test int flag")
- cmd.PersistentFlags().BoolVar(&testBool, "test-bool", false, "Test bool flag")
- cmd.Flags().StringSliceVar(&testArray, "test-array", []string{}, "Test array flag")
- cmd.PersistentFlags().Float64Var(&testFloat, "test-float", 0, "Test float flag")
-
- err := utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "test-string-value", testString)
- assert.Equal(t, 123, testInt)
- assert.Equal(t, true, testBool)
- assert.Equal(t, []string{"test", "array", "flag"}, testArray)
- assert.Equal(t, 123.456, testFloat)
- })
-
- t.Run("BindFlags_FromYAML_StringArrayVar", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- yamlConfig := []byte(`
-regex:
- - test\=
- - array\=
- - flag\=
-another-regex: [test\=, array\=, flag\=]
-`)
-
- cmd := &cobra.Command{}
- v := getViper()
- v.SetConfigType("yaml")
- assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
-
- var testArray []string
- cmd.Flags().StringArrayVar(&testArray, "regex", []string{}, "Test array flag")
- cmd.Flags().StringArrayVar(&testArray, "another-regex", []string{}, "Test array flag")
-
- err := utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, []string{"test\\=", "array\\=", "flag\\="}, testArray)
- assert.Equal(t, []string{"test\\=", "array\\=", "flag\\="}, testArray)
- })
-
- t.Run("BindFlags_FromYAML_SubCMD", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- yamlConfig := []byte(`
-global-string: global-string-value
-subCommand:
- test-string: test-string-value
- test-int: 123
- test-bool: true
-`)
-
- cmd := &cobra.Command{}
- v := getViper()
- v.SetConfigType("yaml")
- assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
-
- var (
- globalString string
- testString string
- testInt int
- testBool bool
- )
-
- cmd.PersistentFlags().StringVar(&globalString, "global-string", "", "Global string flag")
- subCmd := &cobra.Command{
- Use: "subCommand",
- }
- cmd.AddCommand(subCmd)
- subCmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
- subCmd.Flags().IntVar(&testInt, "test-int", 0, "Test int flag")
- subCmd.PersistentFlags().BoolVar(&testBool, "test-bool", false, "Test bool flag")
-
- err := utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "global-string-value", globalString)
- assert.Equal(t, "test-string-value", testString)
- assert.Equal(t, 123, testInt)
- assert.Equal(t, true, testBool)
- })
-
- t.Run("BindFlags_FromYAML_SubCMD_WithEnvVars", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- yamlConfig := []byte(`
-global-string: global-string-value
-subCommand:
- test-string: test-string-value
- test-int: 123
- test-bool: true
-`)
- cmd := &cobra.Command{}
- v := getViper()
- v.SetConfigType("yaml")
- assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
-
- var (
- globalString string
- testString string
- testInt int
- testBool bool
- )
-
- cmd.PersistentFlags().StringVar(&globalString, "global-string", "", "Global string flag")
- subCmd := &cobra.Command{
- Use: "subCommand",
- }
- cmd.AddCommand(subCmd)
- subCmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
- subCmd.Flags().IntVar(&testInt, "test-int", 0, "Test int flag")
- subCmd.PersistentFlags().BoolVar(&testBool, "test-bool", false, "Test bool flag")
-
- err := setEnv("PREFIX_GLOBAL_STRING", "global-string-value-from-env")
- assert.NoError(t, err)
- err = setEnv("PREFIX_SUBCOMMAND_TEST_STRING", "test-string-value-from-env")
- assert.NoError(t, err)
-
- err = utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "global-string-value-from-env", globalString)
- assert.Equal(t, "test-string-value-from-env", testString)
- assert.Equal(t, 123, testInt)
- assert.Equal(t, true, testBool)
- })
-
- t.Run("BindFlags_FromYAML_SubSubCmd", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- yamlConfig := []byte(`
-global-string: global-string-value
-subCommand:
- first-string: string-from-sub-command
- subSubCommand:
- second-string: string from sub-sub command
-`)
- cmd := &cobra.Command{}
- v := getViper()
- v.SetConfigType("yaml")
- assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
-
- var (
- globalString string
- firstString string
- secondString string
- )
-
- subSubCmd := &cobra.Command{
- Use: "subSubCommand",
- }
- subCmd := &cobra.Command{
- Use: "subCommand",
- }
- subCmd.AddCommand(subSubCmd)
- cmd.AddCommand(subCmd)
- cmd.PersistentFlags().StringVar(&globalString, "global-string", "", "Global string flag")
- subCmd.PersistentFlags().StringVar(&firstString, "first-string", "", "Test string flag")
- subSubCmd.Flags().StringVar(&secondString, "second-string", "", "Test string flag")
-
- err := utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "global-string-value", globalString)
- assert.Equal(t, "string-from-sub-command", firstString)
- assert.Equal(t, "string from sub-sub command", secondString)
- })
-
- t.Run("BindFlags_FromYAML_SameFlagName_Root", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- yamlConfig := []byte(`
-test-string: global-string-value
-subCommand:
- dummy-string: string-from-sub-command
-`)
-
- cmd := &cobra.Command{}
- v := getViper()
- v.SetConfigType("yaml")
- assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
-
- var (
- testStringRoot string
- testStringSub string
- )
-
- subCmd := &cobra.Command{
- Use: "subCommand",
- }
- cmd.AddCommand(subCmd)
-
- cmd.PersistentFlags().StringVar(&testStringRoot, "test-string", "", "Test string flag")
- subCmd.PersistentFlags().StringVar(&testStringSub, "test-string", "", "Test string flag")
-
- err := utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "global-string-value", testStringRoot)
- assert.Equal(t, "", testStringSub)
- })
-
- t.Run("BindFlags_FromYAML_SameFlagName_SubCmd", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- yamlConfig := []byte(`
-test-string: global-string-value
-subCommand:
- test-string: string-from-sub-command
-`)
-
- cmd := &cobra.Command{}
- v := getViper()
- v.SetConfigType("yaml")
- assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
-
- var (
- testStringRoot string
- testStringSub string
- )
-
- subCmd := &cobra.Command{
- Use: "subCommand",
- }
-
- cmd.PersistentFlags().StringVar(&testStringRoot, "test-string", "", "Test string flag")
- subCmd.PersistentFlags().StringVar(&testStringSub, "test-string", "", "Test string flag")
-
- cmd.AddCommand(subCmd)
-
- err := utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "global-string-value", testStringRoot)
- assert.Equal(t, "string-from-sub-command", testStringSub)
- })
-
- t.Run("BindFlags_FromJSON", func(t *testing.T) {
- assertClearEnv(t)
- defer clearEnvVars(t)
-
- jsonConfig := []byte(`
- {
- "global-string": "global-string-value",
- "subCommand": {
- "test-string": "string-from-sub-command"
- }
- }`)
-
- cmd := &cobra.Command{}
- v := getViper()
- v.SetConfigType("json")
- assert.NoError(t, v.ReadConfig(bytes.NewBuffer(jsonConfig)))
-
- subCmd := &cobra.Command{
- Use: "subCommand",
- }
- cmd.AddCommand(subCmd)
-
- globalString := cmd.PersistentFlags().String("global-string", "", "Global string flag")
- testString := subCmd.PersistentFlags().String("test-string", "", "Test string flag")
-
- err := utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
-
- assert.Equal(t, "global-string-value", *globalString)
- assert.Equal(t, "string-from-sub-command", *testString)
- })
-}
-
-func TestEndToEndWithExecute(t *testing.T) {
- configFlagName := "config"
-
- testCases := []struct {
- name string
- args []string
- envVars map[string]string
- config []byte
- configFormat string
- }{
- {
- name: "from env vars",
- args: []string{"subcommand"},
- envVars: map[string]string{"TEST_STRING": "env-value", "TEST_INT": "123", "SUBCOMMAND_TEST_BOOL": "true"},
- },
- {
- name: "from argument",
- args: []string{"subcommand", "--test-string", "argument-value", "--test-int", "123", "--test-bool", "true"},
- },
- {
- name: "from config",
- args: []string{"subcommand"},
- config: []byte(`
-test-string: config-value
-test-int: 123
-subcommand:
- test-bool: true
-`),
- configFormat: "yaml",
- },
- {
- name: "from argument and env vars",
- args: []string{"subcommand", "--test-string", "argument-value"},
- envVars: map[string]string{
- "TEST_INT": "123",
- "SUBCOMMAND_TEST_BOOL": "true",
- },
- },
- {
- name: "from env vars and config",
- args: []string{"subcommand"},
- envVars: map[string]string{
- "TEST_STRING": "env-value",
- },
- config: []byte(`
-test-int: 123
-subcommand:
- test-bool: true
-`),
- configFormat: "yaml",
- },
- {
- name: "from JSON config",
- args: []string{"subcommand"},
- config: []byte(`
- {
- "test-string": "config-value",
- "test-int": 123,
- "subcommand": {
- "test-bool": true
- }
- }`),
- configFormat: "json",
- },
- }
-
- var cmd *cobra.Command
- var v *viper.Viper
-
- cobra.OnInitialize(func() {
- configFilePath, err := cmd.Flags().GetString(configFlagName)
- if err != nil {
- cobra.CheckErr(err)
- }
- err = utils.LoadConfig(v, configFilePath)
- assert.NoError(t, err)
- err = utils.BindFlags(cmd, v, envVarPrefix)
- assert.NoError(t, err)
- })
-
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- assertClearEnv(t)
- for key, value := range tc.envVars {
- err := setEnv(envVarPrefix+"_"+key, value)
- assert.NoError(t, err)
- }
- defer clearEnvVars(t)
-
- var configFileName string
- if tc.config != nil {
- configFileName = writeTempFile(t, tc.config, tc.configFormat)
- defer os.Remove(configFileName)
-
- tc.args = append(tc.args, "--"+configFlagName, configFileName)
- }
-
- cmd = &cobra.Command{
- Use: "root",
- }
- testString := cmd.PersistentFlags().String("test-string", "", "Test string flag")
- testInt := cmd.PersistentFlags().Int("test-int", 0, "Test int flag")
- assert.NoError(t, cmd.MarkPersistentFlagRequired("test-string"))
- cmd.PersistentFlags().String(configFlagName, "", "Config file name")
-
- var subcommandBool bool
- var subCommandExecuted bool
- subCmd := &cobra.Command{
- Use: "subcommand",
- Run: func(cmd *cobra.Command, args []string) {
- assert.NotEmpty(t, *testString)
- assert.NotEmpty(t, *testInt)
- assert.NotEmpty(t, subcommandBool)
- subCommandExecuted = true
- },
- }
- subCmd.Flags().BoolVar(&subcommandBool, "test-bool", false, "Subcommand string flag")
- cmd.AddCommand(subCmd)
-
- v = getViper()
-
- cmd.SetArgs(tc.args)
- err := cmd.Execute()
- assert.NoError(t, err)
-
- assert.True(t, subCommandExecuted)
- subCommandExecuted = false
- })
- }
-}
-
-var envKeys []string
-
-func assertClearEnv(t *testing.T) {
- assert.Len(t, envKeys, 0)
-}
-
-func setEnv(key, value string) error {
- envKeys = append(envKeys, key)
- return os.Setenv(key, value)
-}
-
-func clearEnvVars(t *testing.T) {
- for len(envKeys) > 0 {
- key := envKeys[0]
- err := os.Unsetenv(key)
- assert.NoError(t, err)
- envKeys = envKeys[1:]
- }
-}
-
-func writeTempFile(t *testing.T, content []byte, fileExtension string) string {
- file, err := os.CreateTemp("", "config-*."+fileExtension)
- assert.NoError(t, err)
-
- _, err = file.Write([]byte(content))
- assert.NoError(t, err)
- assert.NoError(t, file.Close())
-
- return file.Name()
-}
-
-func getViper() *viper.Viper {
- v := viper.New()
- v.SetEnvPrefix(envVarPrefix)
-
- return v
-}
+package utils_test
+
+import (
+ "bytes"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/checkmarx/2ms/lib/utils"
+ "github.com/spf13/cobra"
+ "github.com/spf13/viper"
+ "github.com/stretchr/testify/assert"
+)
+
+const envVarPrefix = "PREFIX"
+
+func TestBindFlags(t *testing.T) {
+ t.Run("BindFlags_TestEmptyViper", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+
+ var (
+ testString string
+ testInt int
+ testBool bool
+ testFloat64 float64
+ )
+
+ cmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
+ cmd.PersistentFlags().IntVar(&testInt, "test-int", 0, "Test int flag")
+ cmd.PersistentFlags().BoolVar(&testBool, "test-bool", false, "Test bool flag")
+ cmd.PersistentFlags().Float64Var(&testFloat64, "test-float64", 0.0, "Test float64 flag")
+
+ err := utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Empty(t, testString)
+ assert.Empty(t, testInt)
+ assert.Empty(t, testBool)
+ assert.Empty(t, testFloat64)
+ })
+
+ t.Run("BindFlags_FromEnvVarsToCobraCommand", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+ v.SetEnvPrefix(envVarPrefix)
+
+ var (
+ testString string
+ testInt int
+ testBool bool
+ testFloat64 float64
+ )
+
+ cmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
+ cmd.PersistentFlags().IntVar(&testInt, "test-int", 0, "Test int flag")
+ cmd.PersistentFlags().BoolVar(&testBool, "test-bool", false, "Test bool flag")
+ cmd.PersistentFlags().Float64Var(&testFloat64, "test-float64", 0.0, "Test float64 flag")
+
+ err := setEnv("PREFIX_TEST_STRING", "test-string-value")
+ assert.NoError(t, err)
+ err = setEnv("PREFIX_TEST_INT", "456")
+ assert.NoError(t, err)
+ err = setEnv("PREFIX_TEST_BOOL", "true")
+ assert.NoError(t, err)
+ err = setEnv("PREFIX_TEST_FLOAT64", "1.23")
+ assert.NoError(t, err)
+
+ err = utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "test-string-value", testString)
+ assert.Equal(t, 456, testInt)
+ assert.Equal(t, true, testBool)
+ assert.Equal(t, 1.23, testFloat64)
+ })
+
+ t.Run("BindFlags_NonPersistentFlags", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+
+ var (
+ testString string
+ )
+
+ cmd.Flags().StringVar(&testString, "test-string", "", "Test string flag")
+
+ err := setEnv("PREFIX_TEST_STRING", "test-string-value")
+ assert.NoError(t, err)
+
+ err = utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "test-string-value", testString)
+ })
+
+ t.Run("BindFlags_Subcommand", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ var (
+ testString string
+ testInt int
+ )
+
+ subCommand := &cobra.Command{
+ Use: "subCommand",
+ }
+ subCommand.Flags().StringVar(&testString, "test-string", "", "Test string flag")
+ subCommand.PersistentFlags().IntVar(&testInt, "test-int", 0, "Test int flag")
+
+ cmd := &cobra.Command{}
+ cmd.AddCommand(subCommand)
+ v := getViper()
+
+ err := setEnv("PREFIX_SUBCOMMAND_TEST_STRING", "test-string-value")
+ assert.NoError(t, err)
+ err = setEnv("PREFIX_SUBCOMMAND_TEST_INT", "456")
+ assert.NoError(t, err)
+
+ err = utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "test-string-value", testString)
+ assert.Equal(t, 456, testInt)
+ })
+
+ t.Run("BindFlags_ArrayFlag", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ arr := []string{"test", "array", "flag"}
+
+ cmd := &cobra.Command{}
+ v := getViper()
+
+ var (
+ // testArraySpaces []string
+ testArrayCommas []string
+ )
+
+ // cmd.PersistentFlags().StringSliceVar(&testArraySpaces, "test-array-spaces", []string{}, "Test array flag")
+ cmd.PersistentFlags().StringSliceVar(&testArrayCommas, "test-array-commas", []string{}, "Test array flag")
+
+ // err := setEnv("PREFIX_TEST_ARRAY_SPACES", strings.Join(arr, " "))
+ // assert.NoError(t, err)
+ err := setEnv("PREFIX_TEST_ARRAY_COMMAS", strings.Join(arr, ","))
+ assert.NoError(t, err)
+
+ err = utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ // assert.Equal(t, testArraySpaces, arr)
+ assert.Equal(t, arr, testArrayCommas)
+ })
+
+ t.Run("BindFlags_ReturnsErrorForUnknownConfigurationKeys", func(t *testing.T) {
+ t.Skip("Not sure if we need this feature.")
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+
+ var (
+ testString string
+ )
+
+ cmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
+
+ v.Set("unknown-key", "unknown-value")
+
+ err := utils.BindFlags(cmd, v, envVarPrefix)
+
+ assert.EqualError(t, err, "unknown configuration key: 'unknown-key'\nShowing help for '' command")
+ })
+
+ t.Run("BindFlags_LowerCaseEnvVars", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+
+ var (
+ testString string
+ )
+
+ cmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
+
+ err := setEnv("prefix_test_string", "test-string-value")
+ assert.NoError(t, err)
+
+ err = utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "test-string-value", testString)
+ })
+
+ t.Run("BindFlags_OneWordFlagName", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+
+ var (
+ testString string
+ )
+
+ cmd.Flags().StringVar(&testString, "teststring", "", "Test string flag")
+
+ err := setEnv("prefix_teststring", "test-string-value")
+ assert.NoError(t, err)
+
+ err = utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "test-string-value", testString)
+ })
+
+ t.Run("BindFlags_SameFlagNameDifferentCmd", func(t *testing.T) {
+
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ rootCmd := &cobra.Command{
+ Use: "root",
+ }
+ cmd1 := &cobra.Command{
+ Use: "cmd1",
+ }
+ cmd2 := &cobra.Command{
+ Use: "cmd2",
+ }
+ v := getViper()
+
+ var (
+ testStringRoot string
+ testStringPersistentRoot string
+ testString1 string
+ testStringPersistent1 string
+ testString2 string
+ testStringPersistent2 string
+ )
+
+ rootCmd.Flags().StringVar(&testStringRoot, "test-string", "", "Test string flag")
+ rootCmd.PersistentFlags().StringVar(&testStringPersistentRoot, "test-string-persistent", "", "Test string flag")
+ cmd1.Flags().StringVar(&testString1, "test-string", "", "Test string flag")
+ cmd1.PersistentFlags().StringVar(&testStringPersistent1, "test-string-persistent", "", "Test string flag")
+ cmd2.Flags().StringVar(&testString2, "test-string", "", "Test string flag")
+ cmd2.PersistentFlags().StringVar(&testStringPersistent2, "test-string-persistent", "", "Test string flag")
+
+ rootCmd.AddCommand(cmd1)
+ rootCmd.AddCommand(cmd2)
+
+ err := setEnv("prefix_test_string", "test-string-value")
+ assert.NoError(t, err)
+ err = setEnv("prefix_test_string_persistent", "test-string-persistent-value")
+ assert.NoError(t, err)
+ err = setEnv("prefix_cmd1_test_string", "test-string-value-cmd1")
+ assert.NoError(t, err)
+ err = setEnv("prefix_cmd1_test_string_persistent", "test-string-persistent-value-cmd1")
+ assert.NoError(t, err)
+ err = setEnv("prefix_cmd2_test_string", "test-string-value-cmd2")
+ assert.NoError(t, err)
+ err = setEnv("prefix_cmd2_test_string_persistent", "test-string-persistent-value-cmd2")
+ assert.NoError(t, err)
+
+ err = utils.BindFlags(rootCmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "test-string-value", testStringRoot)
+ assert.Equal(t, "test-string-persistent-value", testStringPersistentRoot)
+ assert.Equal(t, "test-string-value-cmd1", testString1)
+ assert.Equal(t, "test-string-persistent-value-cmd1", testStringPersistent1)
+ assert.Equal(t, "test-string-value-cmd2", testString2)
+ assert.Equal(t, "test-string-persistent-value-cmd2", testStringPersistent2)
+ })
+
+ t.Run("BindFlags_FromYAML_RootCMD", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ yamlConfig := []byte(`
+test-string: test-string-value
+test-int: 123
+test-bool: true
+test-array:
+ - test
+ - array
+ - flag
+test-float: 123.456
+`)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+ v.SetConfigType("yaml")
+ assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
+
+ var (
+ testString string
+ testInt int
+ testBool bool
+ testArray []string
+ testFloat float64
+ )
+
+ cmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
+ cmd.Flags().IntVar(&testInt, "test-int", 0, "Test int flag")
+ cmd.PersistentFlags().BoolVar(&testBool, "test-bool", false, "Test bool flag")
+ cmd.Flags().StringSliceVar(&testArray, "test-array", []string{}, "Test array flag")
+ cmd.PersistentFlags().Float64Var(&testFloat, "test-float", 0, "Test float flag")
+
+ err := utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "test-string-value", testString)
+ assert.Equal(t, 123, testInt)
+ assert.Equal(t, true, testBool)
+ assert.Equal(t, []string{"test", "array", "flag"}, testArray)
+ assert.Equal(t, 123.456, testFloat)
+ })
+
+ t.Run("BindFlags_FromYAML_StringArrayVar", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ yamlConfig := []byte(`
+regex:
+ - test\=
+ - array\=
+ - flag\=
+another-regex: [test\=, array\=, flag\=]
+`)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+ v.SetConfigType("yaml")
+ assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
+
+ var testArray []string
+ cmd.Flags().StringArrayVar(&testArray, "regex", []string{}, "Test array flag")
+ cmd.Flags().StringArrayVar(&testArray, "another-regex", []string{}, "Test array flag")
+
+ err := utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, []string{"test\\=", "array\\=", "flag\\="}, testArray)
+ assert.Equal(t, []string{"test\\=", "array\\=", "flag\\="}, testArray)
+ })
+
+ t.Run("BindFlags_FromYAML_SubCMD", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ yamlConfig := []byte(`
+global-string: global-string-value
+subCommand:
+ test-string: test-string-value
+ test-int: 123
+ test-bool: true
+`)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+ v.SetConfigType("yaml")
+ assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
+
+ var (
+ globalString string
+ testString string
+ testInt int
+ testBool bool
+ )
+
+ cmd.PersistentFlags().StringVar(&globalString, "global-string", "", "Global string flag")
+ subCmd := &cobra.Command{
+ Use: "subCommand",
+ }
+ cmd.AddCommand(subCmd)
+ subCmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
+ subCmd.Flags().IntVar(&testInt, "test-int", 0, "Test int flag")
+ subCmd.PersistentFlags().BoolVar(&testBool, "test-bool", false, "Test bool flag")
+
+ err := utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "global-string-value", globalString)
+ assert.Equal(t, "test-string-value", testString)
+ assert.Equal(t, 123, testInt)
+ assert.Equal(t, true, testBool)
+ })
+
+ t.Run("BindFlags_FromYAML_SubCMD_WithEnvVars", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ yamlConfig := []byte(`
+global-string: global-string-value
+subCommand:
+ test-string: test-string-value
+ test-int: 123
+ test-bool: true
+`)
+ cmd := &cobra.Command{}
+ v := getViper()
+ v.SetConfigType("yaml")
+ assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
+
+ var (
+ globalString string
+ testString string
+ testInt int
+ testBool bool
+ )
+
+ cmd.PersistentFlags().StringVar(&globalString, "global-string", "", "Global string flag")
+ subCmd := &cobra.Command{
+ Use: "subCommand",
+ }
+ cmd.AddCommand(subCmd)
+ subCmd.PersistentFlags().StringVar(&testString, "test-string", "", "Test string flag")
+ subCmd.Flags().IntVar(&testInt, "test-int", 0, "Test int flag")
+ subCmd.PersistentFlags().BoolVar(&testBool, "test-bool", false, "Test bool flag")
+
+ err := setEnv("PREFIX_GLOBAL_STRING", "global-string-value-from-env")
+ assert.NoError(t, err)
+ err = setEnv("PREFIX_SUBCOMMAND_TEST_STRING", "test-string-value-from-env")
+ assert.NoError(t, err)
+
+ err = utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "global-string-value-from-env", globalString)
+ assert.Equal(t, "test-string-value-from-env", testString)
+ assert.Equal(t, 123, testInt)
+ assert.Equal(t, true, testBool)
+ })
+
+ t.Run("BindFlags_FromYAML_SubSubCmd", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ yamlConfig := []byte(`
+global-string: global-string-value
+subCommand:
+ first-string: string-from-sub-command
+ subSubCommand:
+ second-string: string from sub-sub command
+`)
+ cmd := &cobra.Command{}
+ v := getViper()
+ v.SetConfigType("yaml")
+ assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
+
+ var (
+ globalString string
+ firstString string
+ secondString string
+ )
+
+ subSubCmd := &cobra.Command{
+ Use: "subSubCommand",
+ }
+ subCmd := &cobra.Command{
+ Use: "subCommand",
+ }
+ subCmd.AddCommand(subSubCmd)
+ cmd.AddCommand(subCmd)
+ cmd.PersistentFlags().StringVar(&globalString, "global-string", "", "Global string flag")
+ subCmd.PersistentFlags().StringVar(&firstString, "first-string", "", "Test string flag")
+ subSubCmd.Flags().StringVar(&secondString, "second-string", "", "Test string flag")
+
+ err := utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "global-string-value", globalString)
+ assert.Equal(t, "string-from-sub-command", firstString)
+ assert.Equal(t, "string from sub-sub command", secondString)
+ })
+
+ t.Run("BindFlags_FromYAML_SameFlagName_Root", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ yamlConfig := []byte(`
+test-string: global-string-value
+subCommand:
+ dummy-string: string-from-sub-command
+`)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+ v.SetConfigType("yaml")
+ assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
+
+ var (
+ testStringRoot string
+ testStringSub string
+ )
+
+ subCmd := &cobra.Command{
+ Use: "subCommand",
+ }
+ cmd.AddCommand(subCmd)
+
+ cmd.PersistentFlags().StringVar(&testStringRoot, "test-string", "", "Test string flag")
+ subCmd.PersistentFlags().StringVar(&testStringSub, "test-string", "", "Test string flag")
+
+ err := utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "global-string-value", testStringRoot)
+ assert.Equal(t, "", testStringSub)
+ })
+
+ t.Run("BindFlags_FromYAML_SameFlagName_SubCmd", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ yamlConfig := []byte(`
+test-string: global-string-value
+subCommand:
+ test-string: string-from-sub-command
+`)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+ v.SetConfigType("yaml")
+ assert.NoError(t, v.ReadConfig(bytes.NewBuffer(yamlConfig)))
+
+ var (
+ testStringRoot string
+ testStringSub string
+ )
+
+ subCmd := &cobra.Command{
+ Use: "subCommand",
+ }
+
+ cmd.PersistentFlags().StringVar(&testStringRoot, "test-string", "", "Test string flag")
+ subCmd.PersistentFlags().StringVar(&testStringSub, "test-string", "", "Test string flag")
+
+ cmd.AddCommand(subCmd)
+
+ err := utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "global-string-value", testStringRoot)
+ assert.Equal(t, "string-from-sub-command", testStringSub)
+ })
+
+ t.Run("BindFlags_FromJSON", func(t *testing.T) {
+ assertClearEnv(t)
+ defer clearEnvVars(t)
+
+ jsonConfig := []byte(`
+ {
+ "global-string": "global-string-value",
+ "subCommand": {
+ "test-string": "string-from-sub-command"
+ }
+ }`)
+
+ cmd := &cobra.Command{}
+ v := getViper()
+ v.SetConfigType("json")
+ assert.NoError(t, v.ReadConfig(bytes.NewBuffer(jsonConfig)))
+
+ subCmd := &cobra.Command{
+ Use: "subCommand",
+ }
+ cmd.AddCommand(subCmd)
+
+ globalString := cmd.PersistentFlags().String("global-string", "", "Global string flag")
+ testString := subCmd.PersistentFlags().String("test-string", "", "Test string flag")
+
+ err := utils.BindFlags(cmd, v, envVarPrefix)
+ assert.NoError(t, err)
+
+ assert.Equal(t, "global-string-value", *globalString)
+ assert.Equal(t, "string-from-sub-command", *testString)
+ })
+}
+
// TestEndToEndWithExecute drives a root command plus subcommand through
// cobra's Execute end to end, feeding flag values from CLI arguments,
// prefixed environment variables, and YAML/JSON config files, and asserts
// that the subcommand actually ran and observed the bound values.
func TestEndToEndWithExecute(t *testing.T) {
	configFlagName := "config"

	testCases := []struct {
		name         string
		args         []string          // CLI args handed to the root command
		envVars      map[string]string // env vars, set with the test prefix prepended
		config       []byte            // optional config file content (nil = no config file)
		configFormat string            // file extension so viper detects the format
	}{
		{
			name:    "from env vars",
			args:    []string{"subcommand"},
			envVars: map[string]string{"TEST_STRING": "env-value", "TEST_INT": "123", "SUBCOMMAND_TEST_BOOL": "true"},
		},
		{
			name: "from argument",
			args: []string{"subcommand", "--test-string", "argument-value", "--test-int", "123", "--test-bool", "true"},
		},
		{
			name: "from config",
			args: []string{"subcommand"},
			config: []byte(`
test-string: config-value
test-int: 123
subcommand:
 test-bool: true
`),
			configFormat: "yaml",
		},
		{
			name: "from argument and env vars",
			args: []string{"subcommand", "--test-string", "argument-value"},
			envVars: map[string]string{
				"TEST_INT":             "123",
				"SUBCOMMAND_TEST_BOOL": "true",
			},
		},
		{
			name: "from env vars and config",
			args: []string{"subcommand"},
			envVars: map[string]string{
				"TEST_STRING": "env-value",
			},
			config: []byte(`
test-int: 123
subcommand:
 test-bool: true
`),
			configFormat: "yaml",
		},
		{
			name: "from JSON config",
			args: []string{"subcommand"},
			config: []byte(`
 {
 "test-string": "config-value",
 "test-int": 123,
 "subcommand": {
 "test-bool": true
 }
 }`),
			configFormat: "json",
		},
	}

	var cmd *cobra.Command
	var v *viper.Viper

	// cobra.OnInitialize registers a global hook; the closure reads the cmd/v
	// variables captured above, which every subtest reassigns before Execute.
	// NOTE(review): the hook accumulates across Execute calls — each run of a
	// subtest re-invokes all previously registered hooks; harmless here because
	// the closure always reads the latest cmd/v.
	cobra.OnInitialize(func() {
		configFilePath, err := cmd.Flags().GetString(configFlagName)
		if err != nil {
			cobra.CheckErr(err)
		}
		err = utils.LoadConfig(v, configFilePath)
		assert.NoError(t, err)
		err = utils.BindFlags(cmd, v, envVarPrefix)
		assert.NoError(t, err)
	})

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			assertClearEnv(t)
			for key, value := range tc.envVars {
				err := setEnv(envVarPrefix+"_"+key, value)
				assert.NoError(t, err)
			}
			defer clearEnvVars(t)

			var configFileName string
			if tc.config != nil {
				// Materialize the config as a temp file and point the
				// --config flag at it.
				configFileName = writeTempFile(t, tc.config, tc.configFormat)
				defer os.Remove(configFileName)

				tc.args = append(tc.args, "--"+configFlagName, configFileName)
			}

			// A fresh command tree per subtest so flag state never leaks.
			cmd = &cobra.Command{
				Use: "root",
			}
			testString := cmd.PersistentFlags().String("test-string", "", "Test string flag")
			testInt := cmd.PersistentFlags().Int("test-int", 0, "Test int flag")
			assert.NoError(t, cmd.MarkPersistentFlagRequired("test-string"))
			cmd.PersistentFlags().String(configFlagName, "", "Config file name")

			var subcommandBool bool
			var subCommandExecuted bool
			subCmd := &cobra.Command{
				Use: "subcommand",
				Run: func(cmd *cobra.Command, args []string) {
					// Every flag must have been populated by args/env/config
					// by the time the subcommand runs.
					assert.NotEmpty(t, *testString)
					assert.NotEmpty(t, *testInt)
					assert.NotEmpty(t, subcommandBool)
					subCommandExecuted = true
				},
			}
			subCmd.Flags().BoolVar(&subcommandBool, "test-bool", false, "Subcommand string flag")
			cmd.AddCommand(subCmd)

			v = getViper()

			cmd.SetArgs(tc.args)
			err := cmd.Execute()
			assert.NoError(t, err)

			// Guard against cobra silently not dispatching the subcommand.
			assert.True(t, subCommandExecuted)
			subCommandExecuted = false
		})
	}
}
+
// envKeys records every environment variable set through setEnv so that
// clearEnvVars can unset them all afterwards. Package-level mutable state:
// tests using it must not run in parallel.
var envKeys []string
+
// assertClearEnv fails the test if a previous test registered environment
// variables via setEnv without cleaning them up with clearEnvVars.
func assertClearEnv(t *testing.T) {
	assert.Len(t, envKeys, 0)
}
+
+func setEnv(key, value string) error {
+ envKeys = append(envKeys, key)
+ return os.Setenv(key, value)
+}
+
+func clearEnvVars(t *testing.T) {
+ for len(envKeys) > 0 {
+ key := envKeys[0]
+ err := os.Unsetenv(key)
+ assert.NoError(t, err)
+ envKeys = envKeys[1:]
+ }
+}
+
+func writeTempFile(t *testing.T, content []byte, fileExtension string) string {
+ file, err := os.CreateTemp("", "config-*."+fileExtension)
+ assert.NoError(t, err)
+
+ _, err = file.Write([]byte(content))
+ assert.NoError(t, err)
+ assert.NoError(t, file.Close())
+
+ return file.Name()
+}
+
+func getViper() *viper.Viper {
+ v := viper.New()
+ v.SetEnvPrefix(envVarPrefix)
+
+ return v
+}
diff --git a/lib/utils/http.go b/lib/utils/http.go
index f0c9648c..df3580b7 100644
--- a/lib/utils/http.go
+++ b/lib/utils/http.go
@@ -1,66 +1,66 @@
-package utils
-
-import (
- "encoding/base64"
- "fmt"
- "io"
- "net/http"
-
- "github.com/rs/zerolog/log"
-)
-
-type ICredentials interface {
- GetCredentials() (string, string)
-}
-
-func CreateBasicAuthCredentials(credentials ICredentials) string {
- username, password := credentials.GetCredentials()
- return "Basic " + base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s:%s", username, password)))
-}
-
-type IAuthorizationHeader interface {
- GetAuthorizationHeader() string
-}
-
-type RetrySettings struct {
- MaxRetries int
- ErrorCodes []int
-}
-
-func HttpRequest(method string, url string, authorization IAuthorizationHeader, retry RetrySettings) ([]byte, *http.Response, error) {
- request, err := http.NewRequest(method, url, nil)
- if err != nil {
- return nil, nil, fmt.Errorf("unexpected error creating an http request %w", err)
- }
-
- if authorization.GetAuthorizationHeader() != "" {
- request.Header.Set("Authorization", authorization.GetAuthorizationHeader())
- }
-
- client := &http.Client{}
- response, err := client.Do(request)
- if err != nil {
- return nil, response, fmt.Errorf("unable to send http request %w", err)
- }
-
- defer response.Body.Close()
-
- if response.StatusCode < 200 || response.StatusCode >= 300 {
- if retry.MaxRetries > 0 {
- for _, code := range retry.ErrorCodes {
- if response.StatusCode == code {
- log.Warn().Msgf("retrying http request %v", url)
- return HttpRequest(method, url, authorization, RetrySettings{MaxRetries: retry.MaxRetries - 1, ErrorCodes: retry.ErrorCodes})
- }
- }
- }
- return nil, response, fmt.Errorf("error calling http url \"%v\". status code: %v", url, response)
- }
-
- body, err := io.ReadAll(response.Body)
- if err != nil {
- return nil, response, fmt.Errorf("unexpected error reading http response body %w", err)
- }
-
- return body, response, nil
-}
+package utils
+
+import (
+ "encoding/base64"
+ "fmt"
+ "io"
+ "net/http"
+
+ "github.com/rs/zerolog/log"
+)
+
// ICredentials supplies a username/password pair for HTTP basic
// authentication.
type ICredentials interface {
	// GetCredentials returns the username and password, in that order.
	GetCredentials() (string, string)
}
+
+func CreateBasicAuthCredentials(credentials ICredentials) string {
+ username, password := credentials.GetCredentials()
+ return "Basic " + base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s:%s", username, password)))
+}
+
// IAuthorizationHeader yields a ready-to-send Authorization header value;
// an empty string means no authorization should be attached.
type IAuthorizationHeader interface {
	GetAuthorizationHeader() string
}
+
// RetrySettings configures the retry behavior of HttpRequest. The zero
// value disables retries.
type RetrySettings struct {
	// MaxRetries is the number of additional attempts allowed.
	MaxRetries int
	// ErrorCodes lists the HTTP status codes that trigger a retry.
	ErrorCodes []int
}
+
+func HttpRequest(method string, url string, authorization IAuthorizationHeader, retry RetrySettings) ([]byte, *http.Response, error) {
+ request, err := http.NewRequest(method, url, nil)
+ if err != nil {
+ return nil, nil, fmt.Errorf("unexpected error creating an http request %w", err)
+ }
+
+ if authorization.GetAuthorizationHeader() != "" {
+ request.Header.Set("Authorization", authorization.GetAuthorizationHeader())
+ }
+
+ client := &http.Client{}
+ response, err := client.Do(request)
+ if err != nil {
+ return nil, response, fmt.Errorf("unable to send http request %w", err)
+ }
+
+ defer response.Body.Close()
+
+ if response.StatusCode < 200 || response.StatusCode >= 300 {
+ if retry.MaxRetries > 0 {
+ for _, code := range retry.ErrorCodes {
+ if response.StatusCode == code {
+ log.Warn().Msgf("retrying http request %v", url)
+ return HttpRequest(method, url, authorization, RetrySettings{MaxRetries: retry.MaxRetries - 1, ErrorCodes: retry.ErrorCodes})
+ }
+ }
+ }
+ return nil, response, fmt.Errorf("error calling http url \"%v\". status code: %v", url, response)
+ }
+
+ body, err := io.ReadAll(response.Body)
+ if err != nil {
+ return nil, response, fmt.Errorf("unexpected error reading http response body %w", err)
+ }
+
+ return body, response, nil
+}
diff --git a/lib/utils/http_test.go b/lib/utils/http_test.go
index 2497857a..b983962c 100644
--- a/lib/utils/http_test.go
+++ b/lib/utils/http_test.go
@@ -1,115 +1,115 @@
-package utils
-
-import (
- "errors"
- "github.com/stretchr/testify/assert"
- "net/http"
- "net/http/httptest"
- "testing"
-)
-
-type MockAuthorization struct {
- header string
-}
-
-func (m *MockAuthorization) GetAuthorizationHeader() string {
- return m.header
-}
-
-func TestHttpRequest(t *testing.T) {
- tests := []struct {
- name string
- method string
- url string
- statusCode int
- authorization string
- retry RetrySettings
- responseBody string
- bodyError bool
- expectedError error
- }{
- {
- name: "Successful request",
- method: "GET",
- statusCode: http.StatusOK,
- responseBody: "Success",
- },
- {
- name: "Request with authorization",
- method: "GET",
- statusCode: http.StatusOK,
- authorization: "Bearer token123",
- responseBody: "Authorized",
- },
- {
- name: "Retry on failure",
- method: "GET",
- statusCode: http.StatusInternalServerError,
- retry: RetrySettings{MaxRetries: 1, ErrorCodes: []int{http.StatusInternalServerError}},
- expectedError: errors.New("error calling http url"),
- },
- {
- name: "Client error (no retry)",
- method: "GET",
- statusCode: http.StatusBadRequest,
- expectedError: errors.New("error calling http url"),
- },
- {
- name: "Error creating request",
- method: "GET",
- url: "::://invalid-url",
- expectedError: errors.New("unexpected error creating an http request"),
- },
- {
- name: "Error sending request",
- method: "GET",
- url: "http://localhost:9999",
- expectedError: errors.New("unable to send http request"),
- },
- {
- name: "Error reading response body",
- method: "GET",
- statusCode: http.StatusOK,
- bodyError: true,
- expectedError: errors.New("unexpected error reading http response body"),
- },
- }
-
- for _, test := range tests {
- t.Run(test.name, func(t *testing.T) {
- var server *httptest.Server
- if test.url == "" {
- server = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- if test.authorization != "" {
- assert.Equal(t, test.authorization, r.Header.Get("Authorization"), "Authorization header mismatch")
- }
- w.WriteHeader(test.statusCode)
- if test.bodyError {
- _, err := w.Write([]byte("corrupt data"))
- assert.NoError(t, err)
- w.(http.Flusher).Flush()
- conn, _, _ := w.(http.Hijacker).Hijack()
- err = conn.Close()
- assert.NoError(t, err)
- } else {
- _, _ = w.Write([]byte(test.responseBody))
- }
- }))
- test.url = server.URL
- defer server.Close()
- }
-
- mockAuth := &MockAuthorization{header: test.authorization}
- body, response, err := HttpRequest(test.method, test.url, mockAuth, test.retry)
-
- if test.expectedError != nil {
- assert.Error(t, err, "Expected an error but got none")
- assert.Contains(t, err.Error(), test.expectedError.Error(), "Unexpected error message")
- } else {
- assert.NoError(t, err, "Unexpected error occurred")
- assert.Equal(t, test.statusCode, response.StatusCode, "Unexpected status code")
- assert.Equal(t, test.responseBody, string(body), "Unexpected response body")
- }
- })
- }
-}
+package utils
+
+import (
+ "errors"
+ "github.com/stretchr/testify/assert"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+)
+
// MockAuthorization is a test double for IAuthorizationHeader that returns
// a fixed, canned header value.
type MockAuthorization struct {
	header string
}

// GetAuthorizationHeader returns the canned Authorization header value
// (empty string means "no authorization").
func (m *MockAuthorization) GetAuthorizationHeader() string {
	return m.header
}
+
// TestHttpRequest exercises HttpRequest against an in-process httptest
// server: happy path, Authorization propagation, retry on 5xx, 4xx without
// retry, request-creation failure, connection failure, and a truncated
// response body (simulated by hijacking and closing the connection).
func TestHttpRequest(t *testing.T) {
	tests := []struct {
		name          string
		method        string
		url           string // non-empty overrides the test server URL
		statusCode    int
		authorization string
		retry         RetrySettings
		responseBody  string
		bodyError     bool // when true, the handler cuts the connection mid-body
		expectedError error
	}{
		{
			name:         "Successful request",
			method:       "GET",
			statusCode:   http.StatusOK,
			responseBody: "Success",
		},
		{
			name:          "Request with authorization",
			method:        "GET",
			statusCode:    http.StatusOK,
			authorization: "Bearer token123",
			responseBody:  "Authorized",
		},
		{
			name:          "Retry on failure",
			method:        "GET",
			statusCode:    http.StatusInternalServerError,
			retry:         RetrySettings{MaxRetries: 1, ErrorCodes: []int{http.StatusInternalServerError}},
			expectedError: errors.New("error calling http url"),
		},
		{
			name:          "Client error (no retry)",
			method:        "GET",
			statusCode:    http.StatusBadRequest,
			expectedError: errors.New("error calling http url"),
		},
		{
			name:          "Error creating request",
			method:        "GET",
			url:           "::://invalid-url",
			expectedError: errors.New("unexpected error creating an http request"),
		},
		{
			// NOTE(review): assumes nothing is listening on localhost:9999 —
			// confirm this port stays free in CI.
			name:          "Error sending request",
			method:        "GET",
			url:           "http://localhost:9999",
			expectedError: errors.New("unable to send http request"),
		},
		{
			name:          "Error reading response body",
			method:        "GET",
			statusCode:    http.StatusOK,
			bodyError:     true,
			expectedError: errors.New("unexpected error reading http response body"),
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			var server *httptest.Server
			// Only spin up a server when the case doesn't supply its own URL.
			if test.url == "" {
				server = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
					if test.authorization != "" {
						assert.Equal(t, test.authorization, r.Header.Get("Authorization"), "Authorization header mismatch")
					}
					w.WriteHeader(test.statusCode)
					if test.bodyError {
						// Write a partial body, flush it to the client, then
						// hijack and close the raw connection so the client's
						// body read fails with an unexpected EOF.
						_, err := w.Write([]byte("corrupt data"))
						assert.NoError(t, err)
						w.(http.Flusher).Flush()
						conn, _, _ := w.(http.Hijacker).Hijack()
						err = conn.Close()
						assert.NoError(t, err)
					} else {
						_, _ = w.Write([]byte(test.responseBody))
					}
				}))
				test.url = server.URL
				defer server.Close()
			}

			mockAuth := &MockAuthorization{header: test.authorization}
			body, response, err := HttpRequest(test.method, test.url, mockAuth, test.retry)

			if test.expectedError != nil {
				assert.Error(t, err, "Expected an error but got none")
				assert.Contains(t, err.Error(), test.expectedError.Error(), "Unexpected error message")
			} else {
				assert.NoError(t, err, "Unexpected error occurred")
				assert.Equal(t, test.statusCode, response.StatusCode, "Unexpected status code")
				assert.Equal(t, test.responseBody, string(body), "Unexpected response body")
			}
		})
	}
}
diff --git a/lib/utils/logger.go b/lib/utils/logger.go
index 6699864b..9a6b9067 100644
--- a/lib/utils/logger.go
+++ b/lib/utils/logger.go
@@ -1,40 +1,40 @@
-package utils
-
-import (
- "io"
- "os"
-
- "github.com/rs/zerolog"
-)
-
-type SpecificLevelWriter struct {
- io.Writer
- Levels []zerolog.Level
-}
-
-func (w SpecificLevelWriter) WriteLevel(level zerolog.Level, p []byte) (int, error) {
- for _, l := range w.Levels {
- if l == level {
- return w.Write(p)
- }
- }
- return len(p), nil
-}
-
-func CreateLogger(minimumLevel zerolog.Level) zerolog.Logger {
- writer := zerolog.MultiLevelWriter(
- SpecificLevelWriter{
- Writer: zerolog.ConsoleWriter{Out: os.Stdout, TimeFormat: "15:04:05", NoColor: true},
- Levels: []zerolog.Level{
- zerolog.DebugLevel, zerolog.InfoLevel, zerolog.WarnLevel,
- },
- },
- SpecificLevelWriter{
- Writer: zerolog.ConsoleWriter{Out: os.Stderr, TimeFormat: "15:04:05", NoColor: true},
- Levels: []zerolog.Level{
- zerolog.ErrorLevel, zerolog.FatalLevel, zerolog.PanicLevel,
- },
- },
- )
- return zerolog.New(writer).Level(minimumLevel).With().Timestamp().Logger()
-}
+package utils
+
+import (
+ "io"
+ "os"
+
+ "github.com/rs/zerolog"
+)
+
// SpecificLevelWriter forwards log writes to the embedded Writer only for
// the levels listed in Levels; writes at any other level are dropped.
type SpecificLevelWriter struct {
	io.Writer
	Levels []zerolog.Level
}
+
+func (w SpecificLevelWriter) WriteLevel(level zerolog.Level, p []byte) (int, error) {
+ for _, l := range w.Levels {
+ if l == level {
+ return w.Write(p)
+ }
+ }
+ return len(p), nil
+}
+
+func CreateLogger(minimumLevel zerolog.Level) zerolog.Logger {
+ writer := zerolog.MultiLevelWriter(
+ SpecificLevelWriter{
+ Writer: zerolog.ConsoleWriter{Out: os.Stdout, TimeFormat: "15:04:05", NoColor: true},
+ Levels: []zerolog.Level{
+ zerolog.DebugLevel, zerolog.InfoLevel, zerolog.WarnLevel,
+ },
+ },
+ SpecificLevelWriter{
+ Writer: zerolog.ConsoleWriter{Out: os.Stderr, TimeFormat: "15:04:05", NoColor: true},
+ Levels: []zerolog.Level{
+ zerolog.ErrorLevel, zerolog.FatalLevel, zerolog.PanicLevel,
+ },
+ },
+ )
+ return zerolog.New(writer).Level(minimumLevel).With().Timestamp().Logger()
+}
diff --git a/main.go b/main.go
index 3366ffb5..7d8e0bfc 100644
--- a/main.go
+++ b/main.go
@@ -1,29 +1,29 @@
-package main
-
-import (
- "os"
- "os/signal"
-
- "github.com/checkmarx/2ms/cmd"
- "github.com/checkmarx/2ms/lib/utils"
- "github.com/rs/zerolog"
- "github.com/rs/zerolog/log"
-)
-
-func main() {
- zerolog.SetGlobalLevel(zerolog.InfoLevel)
- log.Logger = utils.CreateLogger(zerolog.InfoLevel)
-
- // this block sets up a go routine to listen for an interrupt signal
- // which will immediately exit gitleaks
- stopChan := make(chan os.Signal, 1)
- signal.Notify(stopChan, os.Interrupt)
- go listenForInterrupt(stopChan)
-
- cmd.Exit(cmd.Execute())
-}
-
-func listenForInterrupt(stopScan chan os.Signal) {
- <-stopScan
- log.Fatal().Msg("Interrupt signal received. Exiting...") // lint:ignore We want to exit immediately
-}
+package main
+
+import (
+ "os"
+ "os/signal"
+
+ "github.com/checkmarx/2ms/cmd"
+ "github.com/checkmarx/2ms/lib/utils"
+ "github.com/rs/zerolog"
+ "github.com/rs/zerolog/log"
+)
+
// main configures global logging at Info level, installs an interrupt
// handler that terminates the process immediately, then runs the CLI and
// exits with its resulting status.
func main() {
	zerolog.SetGlobalLevel(zerolog.InfoLevel)
	log.Logger = utils.CreateLogger(zerolog.InfoLevel)

	// this block sets up a go routine to listen for an interrupt signal
	// which will immediately exit gitleaks
	stopChan := make(chan os.Signal, 1)
	signal.Notify(stopChan, os.Interrupt)
	go listenForInterrupt(stopChan)

	cmd.Exit(cmd.Execute())
}
+
// listenForInterrupt blocks until an interrupt signal arrives, then logs at
// Fatal level — which exits the process with status 1 without running any
// deferred cleanup.
func listenForInterrupt(stopScan chan os.Signal) {
	<-stopScan
	log.Fatal().Msg("Interrupt signal received. Exiting...") // lint:ignore We want to exit immediately
}
diff --git a/plugins/confluence.go b/plugins/confluence.go
index 308f6fd1..00058378 100644
--- a/plugins/confluence.go
+++ b/plugins/confluence.go
@@ -1,360 +1,360 @@
-package plugins
-
-import (
- "encoding/json"
- "fmt"
- "net/http"
- "strings"
- "sync"
-
- "github.com/checkmarx/2ms/lib/utils"
- "github.com/rs/zerolog/log"
- "github.com/spf13/cobra"
-
- "net/url"
-)
-
-const (
- argUrl = "url"
- argSpaces = "spaces"
- argUsername = "username"
- argToken = "token"
- argHistory = "history"
- confluenceDefaultWindow = 25
- confluenceMaxRequests = 500
-)
-
-var (
- username string
- token string
-)
-
-type ConfluencePlugin struct {
- Plugin
- Spaces []string
- History bool
- client IConfluenceClient
-
- itemsChan chan ISourceItem
- errorsChan chan error
-}
-
-func (p *ConfluencePlugin) GetName() string {
- return "confluence"
-}
-
-func isValidURL(cmd *cobra.Command, args []string) error {
- urlStr := args[0]
- parsedURL, err := url.Parse(urlStr)
- if err != nil && parsedURL.Scheme != "https" {
- return fmt.Errorf("invalid URL format")
- }
- return nil
-}
-
-func (p *ConfluencePlugin) DefineCommand(items chan ISourceItem, errors chan error) (*cobra.Command, error) {
- p.itemsChan = items
- p.errorsChan = errors
-
- var confluenceCmd = &cobra.Command{
- Use: fmt.Sprintf("%s ", p.GetName()),
- Short: "Scan Confluence server",
- Long: "Scan Confluence server for sensitive information",
- Example: fmt.Sprintf(" 2ms %s https://checkmarx.atlassian.net/wiki", p.GetName()),
- Args: cobra.MatchAll(cobra.ExactArgs(1), isValidURL),
- Run: func(cmd *cobra.Command, args []string) {
- err := p.initialize(args[0])
- if err != nil {
- errors <- fmt.Errorf("error while initializing confluence plugin: %w", err)
- }
- wg := &sync.WaitGroup{}
- p.scanConfluence(wg)
- wg.Wait()
- close(items)
- },
- }
-
- flags := confluenceCmd.Flags()
- flags.StringSliceVar(&p.Spaces, argSpaces, []string{}, "Confluence spaces: The names or IDs of the spaces to scan")
- flags.StringVar(&username, argUsername, "", "Confluence user name or email for authentication")
- flags.StringVar(&token, argToken, "", "The Confluence API token for authentication")
- flags.BoolVar(&p.History, argHistory, false, "Scan pages history")
-
- return confluenceCmd, nil
-}
-
-func (p *ConfluencePlugin) initialize(urlArg string) error {
-
- url := strings.TrimRight(urlArg, "/")
-
- if username == "" || token == "" {
- log.Warn().Msg("confluence credentials were not provided. The scan will be made anonymously only for the public pages")
- }
- p.client = newConfluenceClient(url, token, username)
-
- p.Limit = make(chan struct{}, confluenceMaxRequests)
- return nil
-}
-
-func (p *ConfluencePlugin) scanConfluence(wg *sync.WaitGroup) {
- spaces, err := p.getSpaces()
- if err != nil {
- p.errorsChan <- err
- }
-
- for _, space := range spaces {
- wg.Add(1)
- go p.scanConfluenceSpace(wg, space)
- }
-}
-
-func (p *ConfluencePlugin) scanConfluenceSpace(wg *sync.WaitGroup, space ConfluenceSpaceResult) {
- defer wg.Done()
-
- pages, err := p.getPages(space)
- if err != nil {
- p.errorsChan <- err
- return
- }
-
- for _, page := range pages.Pages {
- wg.Add(1)
- p.Limit <- struct{}{}
- go func(page ConfluencePage) {
- p.scanPageAllVersions(wg, page, space)
- <-p.Limit
- }(page)
- }
-}
-
-func (p *ConfluencePlugin) scanPageAllVersions(wg *sync.WaitGroup, page ConfluencePage, space ConfluenceSpaceResult) {
- defer wg.Done()
-
- previousVersion := p.scanPageVersion(page, space, 0)
- if !p.History {
- return
- }
-
- for previousVersion > 0 {
- previousVersion = p.scanPageVersion(page, space, previousVersion)
- }
-}
-
-func (p *ConfluencePlugin) scanPageVersion(page ConfluencePage, space ConfluenceSpaceResult, version int) int {
- pageContent, err := p.client.getPageContentRequest(page, version)
- if err != nil {
- p.errorsChan <- err
- return 0
- }
- itemID := fmt.Sprintf("%s-%s-%s", p.GetName(), space.Key, page.ID)
- p.itemsChan <- convertPageToItem(pageContent, itemID)
-
- return pageContent.History.PreviousVersion.Number
-}
-
-func convertPageToItem(pageContent *ConfluencePageContent, itemID string) ISourceItem {
- return &item{
- Content: &pageContent.Body.Storage.Value,
- ID: itemID,
- Source: pageContent.Links["base"] + pageContent.Links["webui"],
- }
-}
-
-func (p *ConfluencePlugin) getSpaces() ([]ConfluenceSpaceResult, error) {
- totalSpaces, err := p.client.getSpacesRequest(0)
- if err != nil {
- return nil, err
- }
-
- actualSize := totalSpaces.Size
-
- for actualSize == confluenceDefaultWindow {
- moreSpaces, err := p.client.getSpacesRequest(totalSpaces.Size)
- if err != nil {
- return nil, err
- }
-
- totalSpaces.Results = append(totalSpaces.Results, moreSpaces.Results...)
- totalSpaces.Size += moreSpaces.Size
- actualSize = moreSpaces.Size
- }
-
- if len(p.Spaces) == 0 {
- log.Info().Msgf(" Total of all %d Spaces detected", len(totalSpaces.Results))
- return totalSpaces.Results, nil
- }
-
- filteredSpaces := make([]ConfluenceSpaceResult, 0)
- if len(p.Spaces) > 0 {
- for _, space := range totalSpaces.Results {
- for _, spaceToScan := range p.Spaces {
- if space.Key == spaceToScan || space.Name == spaceToScan || fmt.Sprintf("%d", space.ID) == spaceToScan {
- filteredSpaces = append(filteredSpaces, space)
- }
- }
- }
- }
-
- log.Info().Msgf(" Total of filtered %d Spaces detected", len(filteredSpaces))
- return filteredSpaces, nil
-}
-
-func (p *ConfluencePlugin) getPages(space ConfluenceSpaceResult) (*ConfluencePageResult, error) {
- totalPages, err := p.client.getPagesRequest(space, 0)
-
- if err != nil {
- return nil, fmt.Errorf("unexpected error creating an http request %w", err)
- }
-
- actualSize := len(totalPages.Pages)
-
- for actualSize == confluenceDefaultWindow {
- morePages, err := p.client.getPagesRequest(space, len(totalPages.Pages))
-
- if err != nil {
- return nil, fmt.Errorf("unexpected error creating an http request %w", err)
- }
-
- totalPages.Pages = append(totalPages.Pages, morePages.Pages...)
- actualSize = len(morePages.Pages)
- }
-
- log.Info().Msgf(" Space - %s have %d pages", space.Name, len(totalPages.Pages))
-
- return totalPages, nil
-}
-
-/*
- * Confluence client
- */
-
-type IConfluenceClient interface {
- getSpacesRequest(start int) (*ConfluenceSpaceResponse, error)
- getPagesRequest(space ConfluenceSpaceResult, start int) (*ConfluencePageResult, error)
- getPageContentRequest(page ConfluencePage, version int) (*ConfluencePageContent, error)
-}
-
-type confluenceClient struct {
- baseURL string
- token string
- username string
-}
-
-func newConfluenceClient(baseURL, token, username string) IConfluenceClient {
- return &confluenceClient{
- baseURL: baseURL,
- token: token,
- username: username,
- }
-}
-
-func (c *confluenceClient) GetCredentials() (string, string) {
- return c.username, c.token
-}
-
-func (c *confluenceClient) GetAuthorizationHeader() string {
- if c.username == "" || c.token == "" {
- return ""
- }
- return utils.CreateBasicAuthCredentials(c)
-}
-
-func (c *confluenceClient) getSpacesRequest(start int) (*ConfluenceSpaceResponse, error) {
- url := fmt.Sprintf("%s/rest/api/space?start=%d", c.baseURL, start)
- body, _, err := utils.HttpRequest(http.MethodGet, url, c, utils.RetrySettings{})
- if err != nil {
- return nil, fmt.Errorf("unexpected error creating an http request %w", err)
- }
-
- response := &ConfluenceSpaceResponse{}
- jsonErr := json.Unmarshal(body, response)
- if jsonErr != nil {
- return nil, fmt.Errorf("could not unmarshal response %w", err)
- }
-
- return response, nil
-}
-
-func (c *confluenceClient) getPagesRequest(space ConfluenceSpaceResult, start int) (*ConfluencePageResult, error) {
- url := fmt.Sprintf("%s/rest/api/space/%s/content?start=%d", c.baseURL, space.Key, start)
- body, _, err := utils.HttpRequest(http.MethodGet, url, c, utils.RetrySettings{})
-
- if err != nil {
- return nil, fmt.Errorf("unexpected error creating an http request %w", err)
- }
-
- response := ConfluencePageResponse{}
- jsonErr := json.Unmarshal(body, &response)
- if jsonErr != nil {
- return nil, fmt.Errorf("could not unmarshal response %w", err)
- }
-
- return &response.Results, nil
-}
-
-func (c *confluenceClient) getPageContentRequest(page ConfluencePage, version int) (*ConfluencePageContent, error) {
- var url string
-
- // If no version given get the latest, else get the specified version
- if version == 0 {
- url = fmt.Sprintf("%s/rest/api/content/%s?expand=body.storage,version,history.previousVersion", c.baseURL, page.ID)
-
- } else {
- url = fmt.Sprintf("%s/rest/api/content/%s?status=historical&version=%d&expand=body.storage,version,history.previousVersion", c.baseURL, page.ID, version)
- }
-
- request, _, err := utils.HttpRequest(http.MethodGet, url, c, utils.RetrySettings{MaxRetries: 3, ErrorCodes: []int{500}})
- if err != nil {
- return nil, fmt.Errorf("unexpected error creating an http request %w", err)
- }
- pageContent := ConfluencePageContent{}
- jsonErr := json.Unmarshal(request, &pageContent)
- if jsonErr != nil {
- return nil, jsonErr
- }
-
- return &pageContent, nil
-}
-
-type ConfluenceSpaceResult struct {
- ID int `json:"id"`
- Key string `json:"key"`
- Name string `json:"Name"`
- Links map[string]string `json:"_links"`
-}
-
-type ConfluenceSpaceResponse struct {
- Results []ConfluenceSpaceResult `json:"results"`
- Size int `json:"size"`
-}
-
-type ConfluencePageContent struct {
- Body struct {
- Storage struct {
- Value string `json:"value"`
- } `json:"storage"`
- } `json:"body"`
- History struct {
- PreviousVersion struct {
- Number int
- } `json:"previousVersion"`
- } `json:"history"`
- Version struct {
- Number int `json:"number"`
- } `json:"version"`
- Links map[string]string `json:"_links"`
-}
-
-type ConfluencePage struct {
- ID string `json:"id"`
- Type string `json:"type"`
- Title string `json:"title"`
-}
-
-type ConfluencePageResult struct {
- Pages []ConfluencePage `json:"results"`
-}
-
-type ConfluencePageResponse struct {
- Results ConfluencePageResult `json:"page"`
-}
+package plugins
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strings"
+ "sync"
+
+ "github.com/checkmarx/2ms/lib/utils"
+ "github.com/rs/zerolog/log"
+ "github.com/spf13/cobra"
+
+ "net/url"
+)
+
// CLI flag names and Confluence API tuning constants.
const (
	argUrl      = "url" // NOTE(review): appears unused in this file — confirm before removing
	argSpaces   = "spaces"
	argUsername = "username"
	argToken    = "token"
	argHistory  = "history"
	// confluenceDefaultWindow matches the page size used when paginating
	// Confluence REST results; fetching continues while a full window returns.
	confluenceDefaultWindow = 25
	// confluenceMaxRequests caps concurrent page-content requests (see the
	// Limit channel created in initialize).
	confluenceMaxRequests = 500
)
+
// username and token hold the Confluence credentials bound to the CLI
// flags in DefineCommand. Package-level mutable state: shared by every
// ConfluencePlugin instance in the process.
var (
	username string
	token    string
)
+
+type ConfluencePlugin struct {
+ Plugin
+ Spaces []string
+ History bool
+ client IConfluenceClient
+
+ itemsChan chan ISourceItem
+ errorsChan chan error
+}
+
+func (p *ConfluencePlugin) GetName() string {
+ return "confluence"
+}
+
+func isValidURL(cmd *cobra.Command, args []string) error {
+ urlStr := args[0]
+ parsedURL, err := url.Parse(urlStr)
+	if err != nil || parsedURL.Scheme != "https" {
+ return fmt.Errorf("invalid URL format")
+ }
+ return nil
+}
+
+func (p *ConfluencePlugin) DefineCommand(items chan ISourceItem, errors chan error) (*cobra.Command, error) {
+ p.itemsChan = items
+ p.errorsChan = errors
+
+ var confluenceCmd = &cobra.Command{
+ Use: fmt.Sprintf("%s ", p.GetName()),
+ Short: "Scan Confluence server",
+ Long: "Scan Confluence server for sensitive information",
+ Example: fmt.Sprintf(" 2ms %s https://checkmarx.atlassian.net/wiki", p.GetName()),
+ Args: cobra.MatchAll(cobra.ExactArgs(1), isValidURL),
+ Run: func(cmd *cobra.Command, args []string) {
+ err := p.initialize(args[0])
+ if err != nil {
+ errors <- fmt.Errorf("error while initializing confluence plugin: %w", err)
+ }
+ wg := &sync.WaitGroup{}
+ p.scanConfluence(wg)
+ wg.Wait()
+ close(items)
+ },
+ }
+
+ flags := confluenceCmd.Flags()
+ flags.StringSliceVar(&p.Spaces, argSpaces, []string{}, "Confluence spaces: The names or IDs of the spaces to scan")
+ flags.StringVar(&username, argUsername, "", "Confluence user name or email for authentication")
+ flags.StringVar(&token, argToken, "", "The Confluence API token for authentication")
+ flags.BoolVar(&p.History, argHistory, false, "Scan pages history")
+
+ return confluenceCmd, nil
+}
+
+func (p *ConfluencePlugin) initialize(urlArg string) error {
+
+ url := strings.TrimRight(urlArg, "/")
+
+ if username == "" || token == "" {
+ log.Warn().Msg("confluence credentials were not provided. The scan will be made anonymously only for the public pages")
+ }
+ p.client = newConfluenceClient(url, token, username)
+
+ p.Limit = make(chan struct{}, confluenceMaxRequests)
+ return nil
+}
+
+func (p *ConfluencePlugin) scanConfluence(wg *sync.WaitGroup) {
+ spaces, err := p.getSpaces()
+ if err != nil {
+ p.errorsChan <- err
+ }
+
+ for _, space := range spaces {
+ wg.Add(1)
+ go p.scanConfluenceSpace(wg, space)
+ }
+}
+
+func (p *ConfluencePlugin) scanConfluenceSpace(wg *sync.WaitGroup, space ConfluenceSpaceResult) {
+ defer wg.Done()
+
+ pages, err := p.getPages(space)
+ if err != nil {
+ p.errorsChan <- err
+ return
+ }
+
+ for _, page := range pages.Pages {
+ wg.Add(1)
+ p.Limit <- struct{}{}
+ go func(page ConfluencePage) {
+ p.scanPageAllVersions(wg, page, space)
+ <-p.Limit
+ }(page)
+ }
+}
+
+func (p *ConfluencePlugin) scanPageAllVersions(wg *sync.WaitGroup, page ConfluencePage, space ConfluenceSpaceResult) {
+ defer wg.Done()
+
+ previousVersion := p.scanPageVersion(page, space, 0)
+ if !p.History {
+ return
+ }
+
+ for previousVersion > 0 {
+ previousVersion = p.scanPageVersion(page, space, previousVersion)
+ }
+}
+
+func (p *ConfluencePlugin) scanPageVersion(page ConfluencePage, space ConfluenceSpaceResult, version int) int {
+ pageContent, err := p.client.getPageContentRequest(page, version)
+ if err != nil {
+ p.errorsChan <- err
+ return 0
+ }
+ itemID := fmt.Sprintf("%s-%s-%s", p.GetName(), space.Key, page.ID)
+ p.itemsChan <- convertPageToItem(pageContent, itemID)
+
+ return pageContent.History.PreviousVersion.Number
+}
+
+func convertPageToItem(pageContent *ConfluencePageContent, itemID string) ISourceItem {
+ return &item{
+ Content: &pageContent.Body.Storage.Value,
+ ID: itemID,
+ Source: pageContent.Links["base"] + pageContent.Links["webui"],
+ }
+}
+
+func (p *ConfluencePlugin) getSpaces() ([]ConfluenceSpaceResult, error) {
+ totalSpaces, err := p.client.getSpacesRequest(0)
+ if err != nil {
+ return nil, err
+ }
+
+ actualSize := totalSpaces.Size
+
+ for actualSize == confluenceDefaultWindow {
+ moreSpaces, err := p.client.getSpacesRequest(totalSpaces.Size)
+ if err != nil {
+ return nil, err
+ }
+
+ totalSpaces.Results = append(totalSpaces.Results, moreSpaces.Results...)
+ totalSpaces.Size += moreSpaces.Size
+ actualSize = moreSpaces.Size
+ }
+
+ if len(p.Spaces) == 0 {
+ log.Info().Msgf(" Total of all %d Spaces detected", len(totalSpaces.Results))
+ return totalSpaces.Results, nil
+ }
+
+ filteredSpaces := make([]ConfluenceSpaceResult, 0)
+ if len(p.Spaces) > 0 {
+ for _, space := range totalSpaces.Results {
+ for _, spaceToScan := range p.Spaces {
+ if space.Key == spaceToScan || space.Name == spaceToScan || fmt.Sprintf("%d", space.ID) == spaceToScan {
+ filteredSpaces = append(filteredSpaces, space)
+ }
+ }
+ }
+ }
+
+ log.Info().Msgf(" Total of filtered %d Spaces detected", len(filteredSpaces))
+ return filteredSpaces, nil
+}
+
+func (p *ConfluencePlugin) getPages(space ConfluenceSpaceResult) (*ConfluencePageResult, error) {
+ totalPages, err := p.client.getPagesRequest(space, 0)
+
+ if err != nil {
+ return nil, fmt.Errorf("unexpected error creating an http request %w", err)
+ }
+
+ actualSize := len(totalPages.Pages)
+
+ for actualSize == confluenceDefaultWindow {
+ morePages, err := p.client.getPagesRequest(space, len(totalPages.Pages))
+
+ if err != nil {
+ return nil, fmt.Errorf("unexpected error creating an http request %w", err)
+ }
+
+ totalPages.Pages = append(totalPages.Pages, morePages.Pages...)
+ actualSize = len(morePages.Pages)
+ }
+
+ log.Info().Msgf(" Space - %s have %d pages", space.Name, len(totalPages.Pages))
+
+ return totalPages, nil
+}
+
+/*
+ * Confluence client
+ */
+
+type IConfluenceClient interface {
+ getSpacesRequest(start int) (*ConfluenceSpaceResponse, error)
+ getPagesRequest(space ConfluenceSpaceResult, start int) (*ConfluencePageResult, error)
+ getPageContentRequest(page ConfluencePage, version int) (*ConfluencePageContent, error)
+}
+
+type confluenceClient struct {
+ baseURL string
+ token string
+ username string
+}
+
+func newConfluenceClient(baseURL, token, username string) IConfluenceClient {
+ return &confluenceClient{
+ baseURL: baseURL,
+ token: token,
+ username: username,
+ }
+}
+
+func (c *confluenceClient) GetCredentials() (string, string) {
+ return c.username, c.token
+}
+
+func (c *confluenceClient) GetAuthorizationHeader() string {
+ if c.username == "" || c.token == "" {
+ return ""
+ }
+ return utils.CreateBasicAuthCredentials(c)
+}
+
+func (c *confluenceClient) getSpacesRequest(start int) (*ConfluenceSpaceResponse, error) {
+ url := fmt.Sprintf("%s/rest/api/space?start=%d", c.baseURL, start)
+ body, _, err := utils.HttpRequest(http.MethodGet, url, c, utils.RetrySettings{})
+ if err != nil {
+ return nil, fmt.Errorf("unexpected error creating an http request %w", err)
+ }
+
+ response := &ConfluenceSpaceResponse{}
+ jsonErr := json.Unmarshal(body, response)
+ if jsonErr != nil {
+		return nil, fmt.Errorf("could not unmarshal response %w", jsonErr)
+ }
+
+ return response, nil
+}
+
+func (c *confluenceClient) getPagesRequest(space ConfluenceSpaceResult, start int) (*ConfluencePageResult, error) {
+ url := fmt.Sprintf("%s/rest/api/space/%s/content?start=%d", c.baseURL, space.Key, start)
+ body, _, err := utils.HttpRequest(http.MethodGet, url, c, utils.RetrySettings{})
+
+ if err != nil {
+ return nil, fmt.Errorf("unexpected error creating an http request %w", err)
+ }
+
+ response := ConfluencePageResponse{}
+ jsonErr := json.Unmarshal(body, &response)
+ if jsonErr != nil {
+		return nil, fmt.Errorf("could not unmarshal response %w", jsonErr)
+ }
+
+ return &response.Results, nil
+}
+
+func (c *confluenceClient) getPageContentRequest(page ConfluencePage, version int) (*ConfluencePageContent, error) {
+ var url string
+
+ // If no version given get the latest, else get the specified version
+ if version == 0 {
+ url = fmt.Sprintf("%s/rest/api/content/%s?expand=body.storage,version,history.previousVersion", c.baseURL, page.ID)
+
+ } else {
+ url = fmt.Sprintf("%s/rest/api/content/%s?status=historical&version=%d&expand=body.storage,version,history.previousVersion", c.baseURL, page.ID, version)
+ }
+
+ request, _, err := utils.HttpRequest(http.MethodGet, url, c, utils.RetrySettings{MaxRetries: 3, ErrorCodes: []int{500}})
+ if err != nil {
+ return nil, fmt.Errorf("unexpected error creating an http request %w", err)
+ }
+ pageContent := ConfluencePageContent{}
+ jsonErr := json.Unmarshal(request, &pageContent)
+ if jsonErr != nil {
+ return nil, jsonErr
+ }
+
+ return &pageContent, nil
+}
+
+type ConfluenceSpaceResult struct {
+ ID int `json:"id"`
+ Key string `json:"key"`
+ Name string `json:"Name"`
+ Links map[string]string `json:"_links"`
+}
+
+type ConfluenceSpaceResponse struct {
+ Results []ConfluenceSpaceResult `json:"results"`
+ Size int `json:"size"`
+}
+
+type ConfluencePageContent struct {
+ Body struct {
+ Storage struct {
+ Value string `json:"value"`
+ } `json:"storage"`
+ } `json:"body"`
+ History struct {
+ PreviousVersion struct {
+ Number int
+ } `json:"previousVersion"`
+ } `json:"history"`
+ Version struct {
+ Number int `json:"number"`
+ } `json:"version"`
+ Links map[string]string `json:"_links"`
+}
+
+type ConfluencePage struct {
+ ID string `json:"id"`
+ Type string `json:"type"`
+ Title string `json:"title"`
+}
+
+type ConfluencePageResult struct {
+ Pages []ConfluencePage `json:"results"`
+}
+
+type ConfluencePageResponse struct {
+ Results ConfluencePageResult `json:"page"`
+}
diff --git a/plugins/confluence_test.go b/plugins/confluence_test.go
index d3be1de1..801e3a3c 100644
--- a/plugins/confluence_test.go
+++ b/plugins/confluence_test.go
@@ -1,820 +1,820 @@
-package plugins
-
-import (
- "bytes"
- "fmt"
- "github.com/rs/zerolog"
- "github.com/rs/zerolog/log"
- "github.com/stretchr/testify/assert"
- "sort"
- "strconv"
- "strings"
- "sync"
- "testing"
-)
-
-type mockConfluenceClient struct {
- pageContentResponse []*ConfluencePageContent
- pageContentError error
- numberOfPages int
- firstPagesRequestError error
- secondPagesRequestError error
- numberOfSpaces int
- firstSpacesRequestError error
- secondSpacesRequestError error
-}
-
-func (m *mockConfluenceClient) getSpacesRequest(start int) (*ConfluenceSpaceResponse, error) {
- if m.firstSpacesRequestError != nil && start == 0 {
- return nil, m.firstSpacesRequestError
- }
-
- if m.secondSpacesRequestError != nil && start != 0 {
- return nil, m.secondSpacesRequestError
- }
-
- var spaces []ConfluenceSpaceResult
- for i := start; i < m.numberOfSpaces && i-start < confluenceDefaultWindow; i++ {
- spaces = append(spaces, ConfluenceSpaceResult{ID: i, Key: strconv.Itoa(i)})
- }
- return &ConfluenceSpaceResponse{
- Results: spaces,
- Size: len(spaces),
- }, nil
-}
-
-func (m *mockConfluenceClient) getPagesRequest(space ConfluenceSpaceResult, start int) (*ConfluencePageResult, error) {
- if m.firstPagesRequestError != nil && start == 0 {
- return nil, m.firstPagesRequestError
- }
-
- if m.secondPagesRequestError != nil && start != 0 {
- return nil, m.secondPagesRequestError
- }
-
- var pages []ConfluencePage
- for i := start; i < m.numberOfPages && i-start < confluenceDefaultWindow; i++ {
- pages = append(pages, ConfluencePage{ID: strconv.Itoa(i)})
- }
- return &ConfluencePageResult{Pages: pages}, nil
-}
-
-func (m *mockConfluenceClient) getPageContentRequest(page ConfluencePage, version int) (*ConfluencePageContent, error) {
- if m.pageContentError != nil {
- return nil, m.pageContentError
- }
- return m.pageContentResponse[version], nil
-}
-
-func TestGetPages(t *testing.T) {
- tests := []struct {
- name string
- numberOfPages int
- firstPagesRequestError error
- secondPagesRequestError error
- expectedError error
- }{
- {
- name: "Error while getting pages before pagination is required",
- numberOfPages: confluenceDefaultWindow - 2,
- firstPagesRequestError: fmt.Errorf("some error before pagination is required"),
- expectedError: fmt.Errorf("unexpected error creating an http request %w", fmt.Errorf("some error before pagination is required")),
- },
- {
- name: "error while getting pages after pagination is required",
- numberOfPages: confluenceDefaultWindow + 2,
- secondPagesRequestError: fmt.Errorf("some error after pagination required"),
- expectedError: fmt.Errorf("unexpected error creating an http request %w", fmt.Errorf("some error after pagination required")),
- },
- {
- name: "pages less than confluenceDefaultWindow",
- numberOfPages: confluenceDefaultWindow - 2,
- expectedError: nil,
- },
- {
- name: "exactly confluenceDefaultWindow pages",
- numberOfPages: confluenceDefaultWindow,
- expectedError: nil,
- },
- {
- name: "fetching more pages after confluenceDefaultWindow",
- numberOfPages: confluenceDefaultWindow + 2,
- expectedError: nil,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- mockClient := mockConfluenceClient{
- numberOfPages: tt.numberOfPages,
- firstPagesRequestError: tt.firstPagesRequestError,
- secondPagesRequestError: tt.secondPagesRequestError,
- }
- space := ConfluenceSpaceResult{Name: "Test Space"}
- plugin := &ConfluencePlugin{client: &mockClient}
- result, err := plugin.getPages(space)
- assert.Equal(t, tt.expectedError, err)
- if tt.expectedError == nil {
- var expectedResult ConfluencePageResult
- for i := 0; i < tt.numberOfPages; i++ {
- expectedResult.Pages = append(expectedResult.Pages, ConfluencePage{ID: strconv.Itoa(i)})
- }
- assert.Equal(t, &expectedResult, result)
- }
- })
- }
-}
-
-func TestGetSpaces(t *testing.T) {
- tests := []struct {
- name string
- numberOfSpaces int
- firstSpacesRequestError error
- secondSpacesRequestError error
- expectedError error
- filteredSpaces []string
- }{
- {
- name: "Error while getting spaces before pagination is required",
- numberOfSpaces: confluenceDefaultWindow - 2,
- firstSpacesRequestError: fmt.Errorf("some error before pagination is required"),
- expectedError: fmt.Errorf("some error before pagination is required"),
- },
- {
- name: "error while getting spaces after pagination is required",
- numberOfSpaces: confluenceDefaultWindow + 2,
- secondSpacesRequestError: fmt.Errorf("some error after pagination required"),
- expectedError: fmt.Errorf("some error after pagination required"),
- },
- {
- name: "zero spaces",
- numberOfSpaces: 0,
- expectedError: nil,
- },
- {
- name: "spaces less than confluenceDefaultWindow",
- numberOfSpaces: confluenceDefaultWindow - 2,
- expectedError: nil,
- },
- {
- name: "exactly confluenceDefaultWindow spaces",
- numberOfSpaces: confluenceDefaultWindow,
- expectedError: nil,
- },
- {
- name: "fetching more spaces after confluenceDefaultWindow",
- numberOfSpaces: confluenceDefaultWindow + 2,
- expectedError: nil,
- },
- {
- name: "fetching spaces with filtered spaces",
- numberOfSpaces: 5,
- filteredSpaces: []string{"2"},
- expectedError: nil,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- mockClient := mockConfluenceClient{
- numberOfSpaces: tt.numberOfSpaces,
- firstSpacesRequestError: tt.firstSpacesRequestError,
- secondSpacesRequestError: tt.secondSpacesRequestError,
- }
- plugin := &ConfluencePlugin{
- client: &mockClient,
- Spaces: tt.filteredSpaces,
- }
- result, err := plugin.getSpaces()
- assert.Equal(t, tt.expectedError, err)
- if tt.expectedError == nil {
- var expectedResult []ConfluenceSpaceResult
- if len(tt.filteredSpaces) == 0 {
- for i := 0; i < tt.numberOfSpaces; i++ {
- expectedResult = append(expectedResult, ConfluenceSpaceResult{ID: i, Key: strconv.Itoa(i)})
- }
- } else {
- for i := 0; i < len(tt.filteredSpaces); i++ {
- id, errConvert := strconv.Atoi(tt.filteredSpaces[i])
- key := tt.filteredSpaces[i]
- assert.NoError(t, errConvert)
- expectedResult = append(expectedResult, ConfluenceSpaceResult{ID: id, Key: key})
- }
- }
- assert.Equal(t, expectedResult, result)
- }
- })
- }
-}
-
-func TestScanPageVersion(t *testing.T) {
- tests := []struct {
- name string
- mockPageContent *ConfluencePageContent
- mockError error
- expectError bool
- expectItem bool
- expectedVersionNum int
- }{
- {
- name: "Successful page scan with previous version",
- mockPageContent: &ConfluencePageContent{
- Body: struct {
- Storage struct {
- Value string `json:"value"`
- } `json:"storage"`
- }(struct {
- Storage struct {
- Value string
- }
- }{
- Storage: struct{ Value string }{Value: "Page content"},
- }),
- History: struct {
- PreviousVersion struct{ Number int } `json:"previousVersion"`
- }(struct {
- PreviousVersion struct {
- Number int
- }
- }{PreviousVersion: struct{ Number int }{Number: 1}}),
- Links: map[string]string{
- "base": "https://example.com",
- "webui": "/wiki/page",
- },
- },
- expectItem: true,
- expectedVersionNum: 1,
- },
- {
- name: "Error fetching page content",
- mockError: fmt.Errorf("fetch error"),
- expectError: true,
- expectItem: false,
- expectedVersionNum: 0,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- mockClient := &mockConfluenceClient{
- pageContentResponse: []*ConfluencePageContent{tt.mockPageContent},
- pageContentError: tt.mockError,
- }
-
- errorsChan := make(chan error, 1)
- itemsChan := make(chan ISourceItem, 1)
-
- plugin := &ConfluencePlugin{
- client: mockClient,
- errorsChan: errorsChan,
- itemsChan: itemsChan,
- }
-
- page := ConfluencePage{ID: "pageID"}
- space := ConfluenceSpaceResult{Key: "spaceKey"}
-
- result := plugin.scanPageVersion(page, space, 0)
-
- assert.Equal(t, tt.expectedVersionNum, result)
-
- if tt.expectError {
- assert.NotEmpty(t, errorsChan)
- err := <-errorsChan
- assert.Equal(t, tt.mockError, err)
- } else {
- assert.Empty(t, errorsChan)
- }
-
- if tt.expectItem {
- assert.NotEmpty(t, itemsChan)
- actualItem := <-itemsChan
- expectedItem := item{
- Content: ptrToString("Page content"),
- ID: "confluence-spaceKey-pageID",
- Source: "https://example.com/wiki/page",
- }
- assert.Equal(t, &expectedItem, actualItem)
- } else {
- assert.Empty(t, itemsChan)
- }
-
- close(itemsChan)
- close(errorsChan)
- })
- }
-}
-
-func TestScanPageAllVersions(t *testing.T) {
- tests := []struct {
- name string
- mockPageContents []*ConfluencePageContent
- expectedErrors []error
- expectedItems []item
- historyEnabled bool
- }{
- {
- name: "scan with multiple versions and history enabled",
- mockPageContents: []*ConfluencePageContent{
- {
- Body: struct {
- Storage struct {
- Value string `json:"value"`
- } `json:"storage"`
- }(struct {
- Storage struct {
- Value string
- }
- }{
- Storage: struct{ Value string }{Value: "Page content 1"},
- }),
- History: struct {
- PreviousVersion struct{ Number int } `json:"previousVersion"`
- }(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 2}}),
- Links: map[string]string{
- "base": "https://example.com",
- "webui": "/wiki/page",
- },
- },
- {
- Body: struct {
- Storage struct {
- Value string `json:"value"`
- } `json:"storage"`
- }(struct {
- Storage struct {
- Value string
- }
- }{
- Storage: struct{ Value string }{Value: "Page content 2"},
- }),
- History: struct {
- PreviousVersion struct{ Number int } `json:"previousVersion"`
- }(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 0}}),
- Links: map[string]string{
- "base": "https://example.com",
- "webui": "/wiki/page",
- },
- },
- {
- Body: struct {
- Storage struct {
- Value string `json:"value"`
- } `json:"storage"`
- }(struct {
- Storage struct {
- Value string
- }
- }{
- Storage: struct{ Value string }{Value: "Page content 3"},
- }),
- History: struct {
- PreviousVersion struct{ Number int } `json:"previousVersion"`
- }(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 1}}),
- Links: map[string]string{
- "base": "https://example.com",
- "webui": "/wiki/page",
- },
- },
- },
- historyEnabled: true,
- expectedErrors: nil,
- expectedItems: []item{
- {
- Content: ptrToString("Page content 1"),
- ID: "confluence-spaceKey-pageID",
- Source: "https://example.com/wiki/page",
- },
- {
- Content: ptrToString("Page content 3"),
- ID: "confluence-spaceKey-pageID",
- Source: "https://example.com/wiki/page",
- },
- {
- Content: ptrToString("Page content 2"),
- ID: "confluence-spaceKey-pageID",
- Source: "https://example.com/wiki/page",
- },
- },
- },
- {
- name: "scan with multiple versions and history disabled",
- mockPageContents: []*ConfluencePageContent{
- {
- Body: struct {
- Storage struct {
- Value string `json:"value"`
- } `json:"storage"`
- }(struct {
- Storage struct {
- Value string
- }
- }{
- Storage: struct{ Value string }{Value: "Page content 1"},
- }),
- History: struct {
- PreviousVersion struct{ Number int } `json:"previousVersion"`
- }(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 2}}),
- Links: map[string]string{
- "base": "https://example.com",
- "webui": "/wiki/page",
- },
- },
- {
- Body: struct {
- Storage struct {
- Value string `json:"value"`
- } `json:"storage"`
- }(struct {
- Storage struct {
- Value string
- }
- }{
- Storage: struct{ Value string }{Value: "Page content 2"},
- }),
- History: struct {
- PreviousVersion struct{ Number int } `json:"previousVersion"`
- }(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 0}}),
- Links: map[string]string{
- "base": "https://example.com",
- "webui": "/wiki/page",
- },
- },
- {
- Body: struct {
- Storage struct {
- Value string `json:"value"`
- } `json:"storage"`
- }(struct {
- Storage struct {
- Value string
- }
- }{
- Storage: struct{ Value string }{Value: "Page content 3"},
- }),
- History: struct {
- PreviousVersion struct{ Number int } `json:"previousVersion"`
- }(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 1}}),
- Links: map[string]string{
- "base": "https://example.com",
- "webui": "/wiki/page",
- },
- },
- },
- historyEnabled: false,
- expectedErrors: nil,
- expectedItems: []item{
- {
- Content: ptrToString("Page content 1"),
- ID: "confluence-spaceKey-pageID",
- Source: "https://example.com/wiki/page",
- },
- },
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- mockClient := &mockConfluenceClient{
- pageContentResponse: tt.mockPageContents,
- }
-
- errorsChan := make(chan error, 3)
- itemsChan := make(chan ISourceItem, 3)
-
- plugin := &ConfluencePlugin{
- client: mockClient,
- errorsChan: errorsChan,
- itemsChan: itemsChan,
- History: tt.historyEnabled,
- }
-
- page := ConfluencePage{ID: "pageID"}
- space := ConfluenceSpaceResult{Key: "spaceKey"}
-
- var wg sync.WaitGroup
- wg.Add(1)
- go plugin.scanPageAllVersions(&wg, page, space)
- wg.Wait()
-
- if len(tt.expectedErrors) == 0 {
- assert.Empty(t, errorsChan)
- }
-
- assert.Equal(t, len(tt.expectedErrors), len(errorsChan))
- for _, expectedError := range tt.expectedErrors {
- actualError := <-errorsChan
- assert.Equal(t, expectedError, actualError)
- }
-
- assert.Equal(t, len(tt.expectedItems), len(itemsChan))
- for _, expectedItem := range tt.expectedItems {
- actualItem := <-itemsChan
- assert.Equal(t, &expectedItem, actualItem)
- }
-
- close(errorsChan)
- close(itemsChan)
- })
- }
-}
-
-func TestScanConfluenceSpace(t *testing.T) {
- tests := []struct {
- name string
- firstPagesRequestError error
- expectedError error
- numberOfPages int
- mockPageContent *ConfluencePageContent
- }{
- {
- name: "getPages returns error",
- firstPagesRequestError: fmt.Errorf("some error before pagination is required"),
- expectedError: fmt.Errorf("unexpected error creating an http request %w", fmt.Errorf("some error before pagination is required")),
- numberOfPages: 1,
- },
- {
- name: "scan confluence space with multiple pages",
- firstPagesRequestError: nil,
- expectedError: nil,
- numberOfPages: 3,
- mockPageContent: &ConfluencePageContent{
- Body: struct {
- Storage struct {
- Value string `json:"value"`
- } `json:"storage"`
- }(struct {
- Storage struct {
- Value string
- }
- }{
- Storage: struct{ Value string }{Value: "Page content"},
- }),
- History: struct {
- PreviousVersion struct{ Number int } `json:"previousVersion"`
- }(struct {
- PreviousVersion struct {
- Number int
- }
- }{PreviousVersion: struct{ Number int }{Number: 1}}),
- Links: map[string]string{
- "base": "https://example.com",
- "webui": "/wiki/page",
- },
- },
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- mockClient := &mockConfluenceClient{
- firstPagesRequestError: tt.firstPagesRequestError,
- numberOfPages: tt.numberOfPages,
- pageContentResponse: []*ConfluencePageContent{tt.mockPageContent},
- }
-
- errorsChan := make(chan error, 1)
- itemsChan := make(chan ISourceItem, 3)
-
- plugin := Plugin{
- Limit: make(chan struct{}, confluenceMaxRequests),
- }
-
- confluencePlugin := &ConfluencePlugin{
- Plugin: plugin,
- client: mockClient,
- errorsChan: errorsChan,
- itemsChan: itemsChan,
- }
-
- space := ConfluenceSpaceResult{Key: "spaceKey"}
- var wg sync.WaitGroup
- wg.Add(1)
-
- go confluencePlugin.scanConfluenceSpace(&wg, space)
-
- wg.Wait()
-
- close(errorsChan)
- close(itemsChan)
-
- if tt.expectedError != nil {
- actualError := <-errorsChan
- assert.Equal(t, tt.expectedError, actualError)
- } else {
- assert.Empty(t, errorsChan)
- var actualItems []ISourceItem
- for i := 0; i < tt.numberOfPages; i++ {
- actualItem := <-itemsChan
- actualItems = append(actualItems, actualItem)
- }
- sort.Slice(actualItems, func(i, j int) bool {
- return actualItems[i].GetID() < actualItems[j].GetID()
- })
- for i := 0; i < tt.numberOfPages; i++ {
- expectedItem := item{
- Content: ptrToString("Page content"),
- ID: fmt.Sprintf("confluence-spaceKey-%d", i),
- Source: "https://example.com/wiki/page",
- }
- assert.Equal(t, &expectedItem, actualItems[i])
- }
- }
- })
- }
-}
-
-func TestScanConfluence(t *testing.T) {
- tests := []struct {
- name string
- firstSpacesRequestError error
- expectedError error
- numberOfSpaces int
- numberOfPages int
- mockPageContent *ConfluencePageContent
- }{
- {
- name: "getSpaces returns error",
- firstSpacesRequestError: fmt.Errorf("some error before pagination is required"),
- expectedError: fmt.Errorf("some error before pagination is required"),
- numberOfPages: 1,
- },
- {
- name: "scan confluence with multiple spaces and pages",
- firstSpacesRequestError: nil,
- expectedError: nil,
- numberOfSpaces: 3,
- numberOfPages: 3,
- mockPageContent: &ConfluencePageContent{
- Body: struct {
- Storage struct {
- Value string `json:"value"`
- } `json:"storage"`
- }(struct {
- Storage struct {
- Value string
- }
- }{
- Storage: struct{ Value string }{Value: "Page content"},
- }),
- History: struct {
- PreviousVersion struct{ Number int } `json:"previousVersion"`
- }(struct {
- PreviousVersion struct {
- Number int
- }
- }{PreviousVersion: struct{ Number int }{Number: 1}}),
- Links: map[string]string{
- "base": "https://example.com",
- "webui": "/wiki/page",
- },
- },
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- mockClient := &mockConfluenceClient{
- firstSpacesRequestError: tt.firstSpacesRequestError,
- numberOfPages: tt.numberOfPages,
- numberOfSpaces: tt.numberOfSpaces,
- pageContentResponse: []*ConfluencePageContent{tt.mockPageContent},
- }
-
- errorsChan := make(chan error, 1)
- itemsChan := make(chan ISourceItem, 3)
-
- plugin := Plugin{
- Limit: make(chan struct{}, confluenceMaxRequests),
- }
-
- confluencePlugin := &ConfluencePlugin{
- Plugin: plugin,
- client: mockClient,
- errorsChan: errorsChan,
- itemsChan: itemsChan,
- }
-
- wg := &sync.WaitGroup{}
-
- go confluencePlugin.scanConfluence(wg)
-
- wg.Wait()
-
- if tt.expectedError != nil {
- actualError := <-errorsChan
- assert.Equal(t, tt.expectedError, actualError)
- } else {
- assert.Empty(t, errorsChan)
- var actualItems []ISourceItem
- for i := 0; i < tt.numberOfSpaces; i++ {
- for j := 0; j < tt.numberOfPages; j++ {
- actualItem := <-itemsChan
- actualItems = append(actualItems, actualItem)
- }
- }
- sort.Slice(actualItems, func(i, j int) bool {
- splitID := func(id string) (string, string) {
- parts := strings.Split(id, "-")
- return parts[1], parts[2]
- }
-
- spaceKey1, pageID1 := splitID(actualItems[i].GetID())
- spaceKey2, pageID2 := splitID(actualItems[j].GetID())
-
- if spaceKey1 != spaceKey2 {
- return spaceKey1 < spaceKey2
- }
- return pageID1 < pageID2
- })
- for i := 0; i < tt.numberOfSpaces; i++ {
- for j := 0; j < tt.numberOfPages; j++ {
- expectedItem := item{
- Content: ptrToString("Page content"),
- ID: fmt.Sprintf("confluence-%d-%d", i, j),
- Source: "https://example.com/wiki/page",
- }
- assert.Equal(t, &expectedItem, actualItems[i*tt.numberOfPages+j])
- }
- }
- }
- })
- }
-}
-
-func TestInitializeConfluence(t *testing.T) {
- tests := []struct {
- name string
- urlArg string
- username string
- token string
- expectURL string
- expectLimit int
- expectWarn bool
- }{
- {
- name: "Valid credentials",
- urlArg: "https://example.com/",
- username: "user",
- token: "token",
- expectURL: "https://example.com",
- expectLimit: confluenceMaxRequests,
- expectWarn: false,
- },
- {
- name: "No credentials provided",
- urlArg: "https://example.com/",
- username: "",
- token: "",
- expectURL: "https://example.com",
- expectLimit: confluenceMaxRequests,
- expectWarn: true,
- },
- {
- name: "URL without trailing slash",
- urlArg: "https://example.com",
- username: "user",
- token: "token",
- expectURL: "https://example.com",
- expectLimit: confluenceMaxRequests,
- expectWarn: false,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- var logBuf bytes.Buffer
- log.Logger = zerolog.New(&logBuf)
-
- username = tt.username
- token = tt.token
-
- p := &ConfluencePlugin{}
-
- err := p.initialize(tt.urlArg)
- assert.NoError(t, err)
-
- assert.NotNil(t, p.client)
- client, ok := p.client.(*confluenceClient)
- assert.True(t, ok, "Client should be of type *confluenceClient")
-
- assert.Equal(t, tt.expectURL, client.baseURL)
-
- assert.Equal(t, tt.username, client.username)
- assert.Equal(t, tt.token, client.token)
-
- assert.NotNil(t, p.Limit)
- assert.Equal(t, tt.expectLimit, cap(p.Limit))
-
- logOutput := logBuf.String()
- if tt.expectWarn {
- assert.Contains(t, logOutput, "confluence credentials were not provided", "Expected warning log missing")
- } else {
- assert.NotContains(t, logOutput, "confluence credentials were not provided", "Unexpected warning log found")
- }
- })
- }
-}
-
-func ptrToString(s string) *string {
- return &s
-}
+package plugins
+
+import (
+ "bytes"
+ "fmt"
+ "github.com/rs/zerolog"
+ "github.com/rs/zerolog/log"
+ "github.com/stretchr/testify/assert"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "testing"
+)
+
+type mockConfluenceClient struct {
+ pageContentResponse []*ConfluencePageContent
+ pageContentError error
+ numberOfPages int
+ firstPagesRequestError error
+ secondPagesRequestError error
+ numberOfSpaces int
+ firstSpacesRequestError error
+ secondSpacesRequestError error
+}
+
+func (m *mockConfluenceClient) getSpacesRequest(start int) (*ConfluenceSpaceResponse, error) {
+ if m.firstSpacesRequestError != nil && start == 0 {
+ return nil, m.firstSpacesRequestError
+ }
+
+ if m.secondSpacesRequestError != nil && start != 0 {
+ return nil, m.secondSpacesRequestError
+ }
+
+ var spaces []ConfluenceSpaceResult
+ for i := start; i < m.numberOfSpaces && i-start < confluenceDefaultWindow; i++ {
+ spaces = append(spaces, ConfluenceSpaceResult{ID: i, Key: strconv.Itoa(i)})
+ }
+ return &ConfluenceSpaceResponse{
+ Results: spaces,
+ Size: len(spaces),
+ }, nil
+}
+
+func (m *mockConfluenceClient) getPagesRequest(space ConfluenceSpaceResult, start int) (*ConfluencePageResult, error) {
+ if m.firstPagesRequestError != nil && start == 0 {
+ return nil, m.firstPagesRequestError
+ }
+
+ if m.secondPagesRequestError != nil && start != 0 {
+ return nil, m.secondPagesRequestError
+ }
+
+ var pages []ConfluencePage
+ for i := start; i < m.numberOfPages && i-start < confluenceDefaultWindow; i++ {
+ pages = append(pages, ConfluencePage{ID: strconv.Itoa(i)})
+ }
+ return &ConfluencePageResult{Pages: pages}, nil
+}
+
+func (m *mockConfluenceClient) getPageContentRequest(page ConfluencePage, version int) (*ConfluencePageContent, error) {
+ if m.pageContentError != nil {
+ return nil, m.pageContentError
+ }
+ return m.pageContentResponse[version], nil
+}
+
// TestGetPages verifies ConfluencePlugin.getPages across pagination
// boundaries (fewer than, exactly, and more than confluenceDefaultWindow
// pages) and checks that request errors are propagated both before and after
// pagination kicks in.
func TestGetPages(t *testing.T) {
	tests := []struct {
		name                    string
		numberOfPages           int
		firstPagesRequestError  error
		secondPagesRequestError error
		expectedError           error
	}{
		{
			name:                   "Error while getting pages before pagination is required",
			numberOfPages:          confluenceDefaultWindow - 2,
			firstPagesRequestError: fmt.Errorf("some error before pagination is required"),
			// getPages is expected to wrap the client error verbatim.
			expectedError: fmt.Errorf("unexpected error creating an http request %w", fmt.Errorf("some error before pagination is required")),
		},
		{
			name:                    "error while getting pages after pagination is required",
			numberOfPages:           confluenceDefaultWindow + 2,
			secondPagesRequestError: fmt.Errorf("some error after pagination required"),
			expectedError:           fmt.Errorf("unexpected error creating an http request %w", fmt.Errorf("some error after pagination required")),
		},
		{
			name:          "pages less than confluenceDefaultWindow",
			numberOfPages: confluenceDefaultWindow - 2,
			expectedError: nil,
		},
		{
			name:          "exactly confluenceDefaultWindow pages",
			numberOfPages: confluenceDefaultWindow,
			expectedError: nil,
		},
		{
			name:          "fetching more pages after confluenceDefaultWindow",
			numberOfPages: confluenceDefaultWindow + 2,
			expectedError: nil,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mockClient := mockConfluenceClient{
				numberOfPages:           tt.numberOfPages,
				firstPagesRequestError:  tt.firstPagesRequestError,
				secondPagesRequestError: tt.secondPagesRequestError,
			}
			space := ConfluenceSpaceResult{Name: "Test Space"}
			plugin := &ConfluencePlugin{client: &mockClient}
			result, err := plugin.getPages(space)
			assert.Equal(t, tt.expectedError, err)
			if tt.expectedError == nil {
				// The mock numbers pages 0..n-1, so the expected result is
				// simply the full sequence of stringified indices.
				var expectedResult ConfluencePageResult
				for i := 0; i < tt.numberOfPages; i++ {
					expectedResult.Pages = append(expectedResult.Pages, ConfluencePage{ID: strconv.Itoa(i)})
				}
				assert.Equal(t, &expectedResult, result)
			}
		})
	}
}
+
// TestGetSpaces verifies ConfluencePlugin.getSpaces across pagination
// boundaries, error propagation before/after pagination, the zero-spaces
// case, and filtering by an explicit space list (plugin.Spaces).
func TestGetSpaces(t *testing.T) {
	tests := []struct {
		name                     string
		numberOfSpaces           int
		firstSpacesRequestError  error
		secondSpacesRequestError error
		expectedError            error
		filteredSpaces           []string
	}{
		{
			name:                    "Error while getting spaces before pagination is required",
			numberOfSpaces:          confluenceDefaultWindow - 2,
			firstSpacesRequestError: fmt.Errorf("some error before pagination is required"),
			expectedError:           fmt.Errorf("some error before pagination is required"),
		},
		{
			name:                     "error while getting spaces after pagination is required",
			numberOfSpaces:           confluenceDefaultWindow + 2,
			secondSpacesRequestError: fmt.Errorf("some error after pagination required"),
			expectedError:            fmt.Errorf("some error after pagination required"),
		},
		{
			name:           "zero spaces",
			numberOfSpaces: 0,
			expectedError:  nil,
		},
		{
			name:           "spaces less than confluenceDefaultWindow",
			numberOfSpaces: confluenceDefaultWindow - 2,
			expectedError:  nil,
		},
		{
			name:           "exactly confluenceDefaultWindow spaces",
			numberOfSpaces: confluenceDefaultWindow,
			expectedError:  nil,
		},
		{
			name:           "fetching more spaces after confluenceDefaultWindow",
			numberOfSpaces: confluenceDefaultWindow + 2,
			expectedError:  nil,
		},
		{
			// The mock keys spaces by their stringified ID, so filtering on
			// "2" must select exactly the space with ID 2.
			name:           "fetching spaces with filtered spaces",
			numberOfSpaces: 5,
			filteredSpaces: []string{"2"},
			expectedError:  nil,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mockClient := mockConfluenceClient{
				numberOfSpaces:           tt.numberOfSpaces,
				firstSpacesRequestError:  tt.firstSpacesRequestError,
				secondSpacesRequestError: tt.secondSpacesRequestError,
			}
			plugin := &ConfluencePlugin{
				client: &mockClient,
				Spaces: tt.filteredSpaces,
			}
			result, err := plugin.getSpaces()
			assert.Equal(t, tt.expectedError, err)
			if tt.expectedError == nil {
				var expectedResult []ConfluenceSpaceResult
				if len(tt.filteredSpaces) == 0 {
					// No filter: every mock space should come back in order.
					for i := 0; i < tt.numberOfSpaces; i++ {
						expectedResult = append(expectedResult, ConfluenceSpaceResult{ID: i, Key: strconv.Itoa(i)})
					}
				} else {
					// Filtered: reconstruct the expected entries from the
					// filter keys (key == stringified ID in the mock).
					for i := 0; i < len(tt.filteredSpaces); i++ {
						id, errConvert := strconv.Atoi(tt.filteredSpaces[i])
						key := tt.filteredSpaces[i]
						assert.NoError(t, errConvert)
						expectedResult = append(expectedResult, ConfluenceSpaceResult{ID: id, Key: key})
					}
				}
				assert.Equal(t, expectedResult, result)
			}
		})
	}
}
+
// TestScanPageVersion verifies a single scanPageVersion call: on success it
// must emit one item built from the page content and return the previous
// version number (for chaining); on a fetch error it must forward the error
// on errorsChan, emit no item, and return 0.
func TestScanPageVersion(t *testing.T) {
	tests := []struct {
		name               string
		mockPageContent    *ConfluencePageContent
		mockError          error
		expectError        bool
		expectItem         bool
		expectedVersionNum int
	}{
		{
			name: "Successful page scan with previous version",
			// The verbose struct conversions below are needed only to
			// populate ConfluencePageContent's anonymous struct fields
			// (tags are ignored in struct conversions since Go 1.8).
			mockPageContent: &ConfluencePageContent{
				Body: struct {
					Storage struct {
						Value string `json:"value"`
					} `json:"storage"`
				}(struct {
					Storage struct {
						Value string
					}
				}{
					Storage: struct{ Value string }{Value: "Page content"},
				}),
				History: struct {
					PreviousVersion struct{ Number int } `json:"previousVersion"`
				}(struct {
					PreviousVersion struct {
						Number int
					}
				}{PreviousVersion: struct{ Number int }{Number: 1}}),
				Links: map[string]string{
					"base":  "https://example.com",
					"webui": "/wiki/page",
				},
			},
			expectItem:         true,
			expectedVersionNum: 1,
		},
		{
			name:               "Error fetching page content",
			mockError:          fmt.Errorf("fetch error"),
			expectError:        true,
			expectItem:         false,
			expectedVersionNum: 0,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mockClient := &mockConfluenceClient{
				pageContentResponse: []*ConfluencePageContent{tt.mockPageContent},
				pageContentError:    tt.mockError,
			}

			// Buffered channels so scanPageVersion never blocks on send.
			errorsChan := make(chan error, 1)
			itemsChan := make(chan ISourceItem, 1)

			plugin := &ConfluencePlugin{
				client:     mockClient,
				errorsChan: errorsChan,
				itemsChan:  itemsChan,
			}

			page := ConfluencePage{ID: "pageID"}
			space := ConfluenceSpaceResult{Key: "spaceKey"}

			result := plugin.scanPageVersion(page, space, 0)

			assert.Equal(t, tt.expectedVersionNum, result)

			if tt.expectError {
				assert.NotEmpty(t, errorsChan)
				err := <-errorsChan
				assert.Equal(t, tt.mockError, err)
			} else {
				assert.Empty(t, errorsChan)
			}

			if tt.expectItem {
				assert.NotEmpty(t, itemsChan)
				actualItem := <-itemsChan
				// Item ID is "confluence-<spaceKey>-<pageID>"; Source is the
				// "base" link joined with "webui".
				expectedItem := item{
					Content: ptrToString("Page content"),
					ID:      "confluence-spaceKey-pageID",
					Source:  "https://example.com/wiki/page",
				}
				assert.Equal(t, &expectedItem, actualItem)
			} else {
				assert.Empty(t, itemsChan)
			}

			close(itemsChan)
			close(errorsChan)
		})
	}
}
+
// TestScanPageAllVersions verifies that scanPageAllVersions walks the page's
// version chain when History is enabled and scans only the current version
// when it is disabled.
//
// The mock serves pageContentResponse[version], so version numbers double as
// indices into the canned contents, and a previousVersion of 0 terminates
// the chain. With the fixtures below the walk is: version 0 ("Page content
// 1", prev 2) -> version 2 ("Page content 3", prev 1) -> version 1 ("Page
// content 2", prev 0, stop) — which is why expectedItems lists contents in
// the order 1, 3, 2.
func TestScanPageAllVersions(t *testing.T) {
	tests := []struct {
		name             string
		mockPageContents []*ConfluencePageContent
		expectedErrors   []error
		expectedItems    []item
		historyEnabled   bool
	}{
		{
			name: "scan with multiple versions and history enabled",
			mockPageContents: []*ConfluencePageContent{
				{
					Body: struct {
						Storage struct {
							Value string `json:"value"`
						} `json:"storage"`
					}(struct {
						Storage struct {
							Value string
						}
					}{
						Storage: struct{ Value string }{Value: "Page content 1"},
					}),
					History: struct {
						PreviousVersion struct{ Number int } `json:"previousVersion"`
					}(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 2}}),
					Links: map[string]string{
						"base":  "https://example.com",
						"webui": "/wiki/page",
					},
				},
				{
					Body: struct {
						Storage struct {
							Value string `json:"value"`
						} `json:"storage"`
					}(struct {
						Storage struct {
							Value string
						}
					}{
						Storage: struct{ Value string }{Value: "Page content 2"},
					}),
					History: struct {
						PreviousVersion struct{ Number int } `json:"previousVersion"`
					}(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 0}}),
					Links: map[string]string{
						"base":  "https://example.com",
						"webui": "/wiki/page",
					},
				},
				{
					Body: struct {
						Storage struct {
							Value string `json:"value"`
						} `json:"storage"`
					}(struct {
						Storage struct {
							Value string
						}
					}{
						Storage: struct{ Value string }{Value: "Page content 3"},
					}),
					History: struct {
						PreviousVersion struct{ Number int } `json:"previousVersion"`
					}(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 1}}),
					Links: map[string]string{
						"base":  "https://example.com",
						"webui": "/wiki/page",
					},
				},
			},
			historyEnabled: true,
			expectedErrors: nil,
			expectedItems: []item{
				{
					Content: ptrToString("Page content 1"),
					ID:      "confluence-spaceKey-pageID",
					Source:  "https://example.com/wiki/page",
				},
				{
					Content: ptrToString("Page content 3"),
					ID:      "confluence-spaceKey-pageID",
					Source:  "https://example.com/wiki/page",
				},
				{
					Content: ptrToString("Page content 2"),
					ID:      "confluence-spaceKey-pageID",
					Source:  "https://example.com/wiki/page",
				},
			},
		},
		{
			name: "scan with multiple versions and history disabled",
			mockPageContents: []*ConfluencePageContent{
				{
					Body: struct {
						Storage struct {
							Value string `json:"value"`
						} `json:"storage"`
					}(struct {
						Storage struct {
							Value string
						}
					}{
						Storage: struct{ Value string }{Value: "Page content 1"},
					}),
					History: struct {
						PreviousVersion struct{ Number int } `json:"previousVersion"`
					}(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 2}}),
					Links: map[string]string{
						"base":  "https://example.com",
						"webui": "/wiki/page",
					},
				},
				{
					Body: struct {
						Storage struct {
							Value string `json:"value"`
						} `json:"storage"`
					}(struct {
						Storage struct {
							Value string
						}
					}{
						Storage: struct{ Value string }{Value: "Page content 2"},
					}),
					History: struct {
						PreviousVersion struct{ Number int } `json:"previousVersion"`
					}(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 0}}),
					Links: map[string]string{
						"base":  "https://example.com",
						"webui": "/wiki/page",
					},
				},
				{
					Body: struct {
						Storage struct {
							Value string `json:"value"`
						} `json:"storage"`
					}(struct {
						Storage struct {
							Value string
						}
					}{
						Storage: struct{ Value string }{Value: "Page content 3"},
					}),
					History: struct {
						PreviousVersion struct{ Number int } `json:"previousVersion"`
					}(struct{ PreviousVersion struct{ Number int } }{PreviousVersion: struct{ Number int }{Number: 1}}),
					Links: map[string]string{
						"base":  "https://example.com",
						"webui": "/wiki/page",
					},
				},
			},
			historyEnabled: false,
			expectedErrors: nil,
			// History disabled: only the current version (index 0) is scanned.
			expectedItems: []item{
				{
					Content: ptrToString("Page content 1"),
					ID:      "confluence-spaceKey-pageID",
					Source:  "https://example.com/wiki/page",
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mockClient := &mockConfluenceClient{
				pageContentResponse: tt.mockPageContents,
			}

			errorsChan := make(chan error, 3)
			itemsChan := make(chan ISourceItem, 3)

			plugin := &ConfluencePlugin{
				client:     mockClient,
				errorsChan: errorsChan,
				itemsChan:  itemsChan,
				History:    tt.historyEnabled,
			}

			page := ConfluencePage{ID: "pageID"}
			space := ConfluenceSpaceResult{Key: "spaceKey"}

			var wg sync.WaitGroup
			wg.Add(1)
			go plugin.scanPageAllVersions(&wg, page, space)
			wg.Wait()

			if len(tt.expectedErrors) == 0 {
				assert.Empty(t, errorsChan)
			}

			assert.Equal(t, len(tt.expectedErrors), len(errorsChan))
			for _, expectedError := range tt.expectedErrors {
				actualError := <-errorsChan
				assert.Equal(t, expectedError, actualError)
			}

			// Items must arrive in version-chain order (see function comment).
			assert.Equal(t, len(tt.expectedItems), len(itemsChan))
			for _, expectedItem := range tt.expectedItems {
				actualItem := <-itemsChan
				assert.Equal(t, &expectedItem, actualItem)
			}

			close(errorsChan)
			close(itemsChan)
		})
	}
}
+
// TestScanConfluenceSpace verifies scanConfluenceSpace end-to-end against the
// mock client: a pages-request failure must surface on errorsChan, and a
// successful run must emit one item per page. Items may arrive in any order
// (pages are scanned concurrently), so they are sorted by ID before
// comparison.
func TestScanConfluenceSpace(t *testing.T) {
	tests := []struct {
		name                   string
		firstPagesRequestError error
		expectedError          error
		numberOfPages          int
		mockPageContent        *ConfluencePageContent
	}{
		{
			name:                   "getPages returns error",
			firstPagesRequestError: fmt.Errorf("some error before pagination is required"),
			expectedError:          fmt.Errorf("unexpected error creating an http request %w", fmt.Errorf("some error before pagination is required")),
			numberOfPages:          1,
		},
		{
			name:                   "scan confluence space with multiple pages",
			firstPagesRequestError: nil,
			expectedError:          nil,
			numberOfPages:          3,
			mockPageContent: &ConfluencePageContent{
				Body: struct {
					Storage struct {
						Value string `json:"value"`
					} `json:"storage"`
				}(struct {
					Storage struct {
						Value string
					}
				}{
					Storage: struct{ Value string }{Value: "Page content"},
				}),
				History: struct {
					PreviousVersion struct{ Number int } `json:"previousVersion"`
				}(struct {
					PreviousVersion struct {
						Number int
					}
				}{PreviousVersion: struct{ Number int }{Number: 1}}),
				Links: map[string]string{
					"base":  "https://example.com",
					"webui": "/wiki/page",
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mockClient := &mockConfluenceClient{
				firstPagesRequestError: tt.firstPagesRequestError,
				numberOfPages:          tt.numberOfPages,
				pageContentResponse:    []*ConfluencePageContent{tt.mockPageContent},
			}

			errorsChan := make(chan error, 1)
			itemsChan := make(chan ISourceItem, 3)

			// Embedded Plugin supplies the request-throttling semaphore.
			plugin := Plugin{
				Limit: make(chan struct{}, confluenceMaxRequests),
			}

			confluencePlugin := &ConfluencePlugin{
				Plugin:     plugin,
				client:     mockClient,
				errorsChan: errorsChan,
				itemsChan:  itemsChan,
			}

			space := ConfluenceSpaceResult{Key: "spaceKey"}
			var wg sync.WaitGroup
			wg.Add(1)

			go confluencePlugin.scanConfluenceSpace(&wg, space)

			wg.Wait()

			// Closing first is safe: buffered values remain readable after close.
			close(errorsChan)
			close(itemsChan)

			if tt.expectedError != nil {
				actualError := <-errorsChan
				assert.Equal(t, tt.expectedError, actualError)
			} else {
				assert.Empty(t, errorsChan)
				var actualItems []ISourceItem
				for i := 0; i < tt.numberOfPages; i++ {
					actualItem := <-itemsChan
					actualItems = append(actualItems, actualItem)
				}
				// Concurrent page scans make arrival order nondeterministic.
				sort.Slice(actualItems, func(i, j int) bool {
					return actualItems[i].GetID() < actualItems[j].GetID()
				})
				for i := 0; i < tt.numberOfPages; i++ {
					expectedItem := item{
						Content: ptrToString("Page content"),
						ID:      fmt.Sprintf("confluence-spaceKey-%d", i),
						Source:  "https://example.com/wiki/page",
					}
					assert.Equal(t, &expectedItem, actualItems[i])
				}
			}
		})
	}
}
+
// TestScanConfluence verifies the top-level scanConfluence flow: a spaces
// fetch error must be reported on errorsChan, and a successful run must emit
// numberOfSpaces * numberOfPages items. Items are sorted by (spaceKey,
// pageID) before comparison because scanning is concurrent.
//
// NOTE(review): no wg.Add happens here before `go ... scanConfluence(wg)`;
// this only avoids a race with wg.Wait if scanConfluence registers itself on
// wg synchronously before any goroutine handoff — confirm against the
// implementation.
func TestScanConfluence(t *testing.T) {
	tests := []struct {
		name                    string
		firstSpacesRequestError error
		expectedError           error
		numberOfSpaces          int
		numberOfPages           int
		mockPageContent         *ConfluencePageContent
	}{
		{
			name:                    "getSpaces returns error",
			firstSpacesRequestError: fmt.Errorf("some error before pagination is required"),
			expectedError:           fmt.Errorf("some error before pagination is required"),
			numberOfPages:           1,
		},
		{
			name:                    "scan confluence with multiple spaces and pages",
			firstSpacesRequestError: nil,
			expectedError:           nil,
			numberOfSpaces:          3,
			numberOfPages:           3,
			mockPageContent: &ConfluencePageContent{
				Body: struct {
					Storage struct {
						Value string `json:"value"`
					} `json:"storage"`
				}(struct {
					Storage struct {
						Value string
					}
				}{
					Storage: struct{ Value string }{Value: "Page content"},
				}),
				History: struct {
					PreviousVersion struct{ Number int } `json:"previousVersion"`
				}(struct {
					PreviousVersion struct {
						Number int
					}
				}{PreviousVersion: struct{ Number int }{Number: 1}}),
				Links: map[string]string{
					"base":  "https://example.com",
					"webui": "/wiki/page",
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mockClient := &mockConfluenceClient{
				firstSpacesRequestError: tt.firstSpacesRequestError,
				numberOfPages:           tt.numberOfPages,
				numberOfSpaces:          tt.numberOfSpaces,
				pageContentResponse:     []*ConfluencePageContent{tt.mockPageContent},
			}

			errorsChan := make(chan error, 1)
			itemsChan := make(chan ISourceItem, 3)

			plugin := Plugin{
				Limit: make(chan struct{}, confluenceMaxRequests),
			}

			confluencePlugin := &ConfluencePlugin{
				Plugin:     plugin,
				client:     mockClient,
				errorsChan: errorsChan,
				itemsChan:  itemsChan,
			}

			wg := &sync.WaitGroup{}

			go confluencePlugin.scanConfluence(wg)

			wg.Wait()

			if tt.expectedError != nil {
				actualError := <-errorsChan
				assert.Equal(t, tt.expectedError, actualError)
			} else {
				assert.Empty(t, errorsChan)
				var actualItems []ISourceItem
				for i := 0; i < tt.numberOfSpaces; i++ {
					for j := 0; j < tt.numberOfPages; j++ {
						actualItem := <-itemsChan
						actualItems = append(actualItems, actualItem)
					}
				}
				// IDs are "confluence-<spaceKey>-<pageID>"; sort by both keys
				// so the concurrent arrival order doesn't matter.
				sort.Slice(actualItems, func(i, j int) bool {
					splitID := func(id string) (string, string) {
						parts := strings.Split(id, "-")
						return parts[1], parts[2]
					}

					spaceKey1, pageID1 := splitID(actualItems[i].GetID())
					spaceKey2, pageID2 := splitID(actualItems[j].GetID())

					if spaceKey1 != spaceKey2 {
						return spaceKey1 < spaceKey2
					}
					return pageID1 < pageID2
				})
				for i := 0; i < tt.numberOfSpaces; i++ {
					for j := 0; j < tt.numberOfPages; j++ {
						expectedItem := item{
							Content: ptrToString("Page content"),
							ID:      fmt.Sprintf("confluence-%d-%d", i, j),
							Source:  "https://example.com/wiki/page",
						}
						assert.Equal(t, &expectedItem, actualItems[i*tt.numberOfPages+j])
					}
				}
			}
		})
	}
}
+
+func TestInitializeConfluence(t *testing.T) {
+ tests := []struct {
+ name string
+ urlArg string
+ username string
+ token string
+ expectURL string
+ expectLimit int
+ expectWarn bool
+ }{
+ {
+ name: "Valid credentials",
+ urlArg: "https://example.com/",
+ username: "user",
+ token: "token",
+ expectURL: "https://example.com",
+ expectLimit: confluenceMaxRequests,
+ expectWarn: false,
+ },
+ {
+ name: "No credentials provided",
+ urlArg: "https://example.com/",
+ username: "",
+ token: "",
+ expectURL: "https://example.com",
+ expectLimit: confluenceMaxRequests,
+ expectWarn: true,
+ },
+ {
+ name: "URL without trailing slash",
+ urlArg: "https://example.com",
+ username: "user",
+ token: "token",
+ expectURL: "https://example.com",
+ expectLimit: confluenceMaxRequests,
+ expectWarn: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ var logBuf bytes.Buffer
+ log.Logger = zerolog.New(&logBuf)
+
+ username = tt.username
+ token = tt.token
+
+ p := &ConfluencePlugin{}
+
+ err := p.initialize(tt.urlArg)
+ assert.NoError(t, err)
+
+ assert.NotNil(t, p.client)
+ client, ok := p.client.(*confluenceClient)
+ assert.True(t, ok, "Client should be of type *confluenceClient")
+
+ assert.Equal(t, tt.expectURL, client.baseURL)
+
+ assert.Equal(t, tt.username, client.username)
+ assert.Equal(t, tt.token, client.token)
+
+ assert.NotNil(t, p.Limit)
+ assert.Equal(t, tt.expectLimit, cap(p.Limit))
+
+ logOutput := logBuf.String()
+ if tt.expectWarn {
+ assert.Contains(t, logOutput, "confluence credentials were not provided", "Expected warning log missing")
+ } else {
+ assert.NotContains(t, logOutput, "confluence credentials were not provided", "Unexpected warning log found")
+ }
+ })
+ }
+}
+
// ptrToString returns a pointer to a fresh copy of s. It exists because
// expected test items carry their Content as *string.
func ptrToString(s string) *string {
	v := s
	return &v
}
diff --git a/plugins/discord.go b/plugins/discord.go
index 6bf798df..dc9029c9 100644
--- a/plugins/discord.go
+++ b/plugins/discord.go
@@ -1,289 +1,289 @@
-package plugins
-
-import (
- "fmt"
- "sync"
- "time"
-
- "github.com/bwmarrin/discordgo"
- "github.com/rs/zerolog"
- "github.com/rs/zerolog/log"
- "github.com/spf13/cobra"
-)
-
-const (
- tokenFlag = "token"
- serversFlag = "server"
- channelsFlag = "channel"
- fromDateFlag = "duration"
- messagesCountFlag = "messages-count"
-)
-
-const defaultDateFrom = time.Hour * 24 * 14
-
-type DiscordPlugin struct {
- Token string
- Guilds []string
- Channels []string
- Count int
- BackwardDuration time.Duration
- Session *discordgo.Session
-
- errChan chan error
- itemChan chan ISourceItem
- waitGroup *sync.WaitGroup
-}
-
-func (p *DiscordPlugin) GetName() string {
- return "discord"
-}
-
-func (p *DiscordPlugin) DefineCommand(items chan ISourceItem, errors chan error) (*cobra.Command, error) {
- var discordCmd = &cobra.Command{
- Use: fmt.Sprintf("%s --%s TOKEN --%s SERVER", p.GetName(), tokenFlag, serversFlag),
- Short: "Scan Discord server",
- Long: "Scan Discord server for sensitive information.",
- }
- flags := discordCmd.Flags()
-
- flags.StringVar(&p.Token, tokenFlag, "", "Discord token [required]")
- err := discordCmd.MarkFlagRequired(tokenFlag)
- if err != nil {
- return nil, fmt.Errorf("error while marking '%s' flag as required: %w", tokenFlag, err)
- }
- flags.StringSliceVar(&p.Guilds, serversFlag, []string{}, "Discord servers IDs to scan [required]")
- err = discordCmd.MarkFlagRequired(serversFlag)
- if err != nil {
- return nil, fmt.Errorf("error while marking '%s' flag as required: %w", serversFlag, err)
- }
- flags.StringSliceVar(&p.Channels, channelsFlag, []string{}, "Discord channels IDs to scan. If not provided, all channels will be scanned")
- flags.DurationVar(&p.BackwardDuration, fromDateFlag, defaultDateFrom, "The time interval to scan from the current time. For example, 24h for 24 hours or 336h0m0s for 14 days.")
- flags.IntVar(&p.Count, messagesCountFlag, 0, "The number of messages to scan. If not provided, all messages will be scanned until the fromDate flag value.")
-
- discordCmd.Run = func(cmd *cobra.Command, args []string) {
- err := p.initialize()
- if err != nil {
- errors <- fmt.Errorf("discord plugin initialization failed: %w", err)
- return
- }
-
- wg := &sync.WaitGroup{}
- p.getItems(items, errors, wg)
- wg.Wait()
- close(items)
- }
-
- return discordCmd, nil
-}
-
-func (p *DiscordPlugin) initialize() error {
- if len(p.Channels) == 0 {
- log.Warn().Msg("discord channels not provided. Will scan all channels")
- }
-
- if p.Count == 0 && p.BackwardDuration == 0 {
- return fmt.Errorf("discord messages count or from date arg is missing. Plugin initialization failed")
- }
-
- return nil
-}
-
-func (p *DiscordPlugin) getItems(itemsChan chan ISourceItem, errChan chan error, wg *sync.WaitGroup) {
- p.errChan = errChan
- p.itemChan = itemsChan
- p.waitGroup = wg
-
- err := p.getDiscordReady()
- if err != nil {
- errChan <- err
- return
- }
-
- guilds := p.getGuildsByNameOrIDs()
- log.Info().Msgf("Found %d guilds", len(guilds))
-
- p.waitGroup.Add(len(guilds))
- for _, guild := range guilds {
- go p.readGuildMessages(guild)
- }
-}
-
-func (p *DiscordPlugin) getDiscordReady() (err error) {
- p.Session, err = discordgo.New(p.Token)
- if err != nil {
- return err
- }
-
- p.Session.StateEnabled = true
- ready := make(chan error)
- p.Session.AddHandlerOnce(func(s *discordgo.Session, r *discordgo.Ready) {
- ready <- nil
- })
- go func() {
- err := p.Session.Open()
- if err != nil {
- ready <- err
- }
- }()
- time.AfterFunc(time.Second*10, func() {
- ready <- fmt.Errorf("discord session timeout")
- })
-
- err = <-ready
- if err != nil {
- return err
- }
-
- return nil
-}
-
-func (p *DiscordPlugin) getGuildsByNameOrIDs() []*discordgo.Guild {
- var result []*discordgo.Guild
-
- for _, guild := range p.Guilds {
- for _, g := range p.Session.State.Guilds {
- if g.Name == guild || g.ID == guild {
- result = append(result, g)
- }
- }
- }
-
- return result
-}
-
-func (p *DiscordPlugin) readGuildMessages(guild *discordgo.Guild) {
- defer p.waitGroup.Done()
-
- guildLogger := log.With().Str("guild", guild.Name).Logger()
- guildLogger.Debug().Send()
-
- selectedChannels := p.getChannelsByNameOrIDs(guild)
- guildLogger.Info().Msgf("Found %d channels", len(selectedChannels))
-
- p.waitGroup.Add(len(selectedChannels))
- for _, channel := range selectedChannels {
- go p.readChannelMessages(channel)
- }
-}
-
-func (p *DiscordPlugin) getChannelsByNameOrIDs(guild *discordgo.Guild) []*discordgo.Channel {
- var result []*discordgo.Channel
- if len(p.Channels) == 0 {
- return guild.Channels
- }
-
- for _, channel := range p.Channels {
- for _, c := range guild.Channels {
- if c.Name == channel || c.ID == channel {
- result = append(result, c)
- }
- }
- }
-
- return result
-}
-
-func (p *DiscordPlugin) readChannelMessages(channel *discordgo.Channel) {
- defer p.waitGroup.Done()
-
- channelLogger := log.With().Str("guildID", channel.GuildID).Str("channel", channel.Name).Logger()
- channelLogger.Debug().Send()
-
- permission, err := p.Session.UserChannelPermissions(p.Session.State.User.ID, channel.ID)
- if err != nil {
- if err, ok := err.(*discordgo.RESTError); ok {
- if err.Message.Code == 50001 {
- channelLogger.Debug().Msg("No read permissions")
- return
- }
- }
-
- channelLogger.Error().Err(err).Msg("Failed to get permissions")
- p.errChan <- err
- return
- }
- if permission&discordgo.PermissionViewChannel == 0 {
- channelLogger.Debug().Msg("No read permissions")
- return
- }
- if channel.Type != discordgo.ChannelTypeGuildText {
- channelLogger.Debug().Msg("Not a text channel")
- return
- }
-
- messages, err := p.getMessages(channel.ID, channelLogger)
- if err != nil {
- channelLogger.Error().Err(err).Msg("Failed to get messages")
- p.errChan <- err
- return
- }
- channelLogger.Info().Msgf("Found %d messages", len(messages))
-
- items := convertMessagesToItems(p.GetName(), channel.GuildID, &messages)
- for _, item := range *items {
- p.itemChan <- item
- }
-}
-
-func (p *DiscordPlugin) getMessages(channelID string, logger zerolog.Logger) ([]*discordgo.Message, error) {
- var messages []*discordgo.Message
- threadMessages := []*discordgo.Message{}
-
- var beforeID string
-
- m, err := p.Session.ChannelMessages(channelID, 100, beforeID, "", "")
- if err != nil {
- return nil, err
- }
-
- lastMessage := false
- for len(m) > 0 && !lastMessage {
-
- for _, message := range m {
-
- timeSince := time.Since(message.Timestamp)
- if p.BackwardDuration > 0 && timeSince > p.BackwardDuration {
- logger.Debug().Msgf("Reached time limit (%s). Last message is %s old", p.BackwardDuration.String(), timeSince.Round(time.Hour).String())
- lastMessage = true
- break
- }
-
- if p.Count > 0 && len(messages) == p.Count {
- logger.Debug().Msgf("Reached message count (%d)", p.Count)
- lastMessage = true
- break
- }
-
- if message.Thread != nil {
- logger.Info().Msgf("Found thread %s", message.Thread.Name)
- tMgs, err := p.getMessages(message.Thread.ID, logger.With().Str("thread", message.Thread.Name).Logger())
- if err != nil {
- return nil, err
- }
- threadMessages = append(threadMessages, tMgs...)
- }
-
- messages = append(messages, message)
- beforeID = message.ID
- }
-
- m, err = p.Session.ChannelMessages(channelID, 100, beforeID, "", "")
- if err != nil {
- return nil, err
- }
- }
-
- return append(messages, threadMessages...), nil
-}
-
-func convertMessagesToItems(pluginName, guildId string, messages *[]*discordgo.Message) *[]ISourceItem {
- items := []ISourceItem{}
- for _, message := range *messages {
- items = append(items, item{
- Content: &message.Content,
- ID: fmt.Sprintf("%s-%s-%s-%s", pluginName, guildId, message.ChannelID, message.ID),
- Source: fmt.Sprintf("https://discord.com/channels/%s/%s/%s", guildId, message.ChannelID, message.ID),
- })
- }
- return &items
-}
+package plugins
+
+import (
+ "fmt"
+ "sync"
+ "time"
+
+ "github.com/bwmarrin/discordgo"
+ "github.com/rs/zerolog"
+ "github.com/rs/zerolog/log"
+ "github.com/spf13/cobra"
+)
+
+const (
+ tokenFlag = "token"
+ serversFlag = "server"
+ channelsFlag = "channel"
+ fromDateFlag = "duration"
+ messagesCountFlag = "messages-count"
+)
+
+const defaultDateFrom = time.Hour * 24 * 14
+
// DiscordPlugin scans Discord guilds/channels for sensitive content.
// Exported fields are populated from CLI flags in DefineCommand; the
// unexported fields are wired up by getItems before scanning starts.
type DiscordPlugin struct {
	Token            string            // bot/user token passed to discordgo.New
	Guilds           []string          // guild names or IDs to scan (required)
	Channels         []string          // channel names or IDs; empty means all channels
	Count            int               // max messages per channel; 0 means unbounded
	BackwardDuration time.Duration     // how far back in time to scan; 0 means unbounded
	Session          *discordgo.Session // live session, set by getDiscordReady

	errChan   chan error       // scan errors are reported here
	itemChan  chan ISourceItem // discovered items are emitted here
	waitGroup *sync.WaitGroup  // tracks guild/channel reader goroutines
}
+
+func (p *DiscordPlugin) GetName() string {
+ return "discord"
+}
+
// DefineCommand builds the cobra command for the Discord plugin and binds
// its flags to the plugin's fields. When the command runs, discovered items
// are sent on items and failures on errors; the items channel is closed once
// every scanning goroutine has finished.
func (p *DiscordPlugin) DefineCommand(items chan ISourceItem, errors chan error) (*cobra.Command, error) {
	var discordCmd = &cobra.Command{
		Use:   fmt.Sprintf("%s --%s TOKEN --%s SERVER", p.GetName(), tokenFlag, serversFlag),
		Short: "Scan Discord server",
		Long:  "Scan Discord server for sensitive information.",
	}
	flags := discordCmd.Flags()

	flags.StringVar(&p.Token, tokenFlag, "", "Discord token [required]")
	err := discordCmd.MarkFlagRequired(tokenFlag)
	if err != nil {
		return nil, fmt.Errorf("error while marking '%s' flag as required: %w", tokenFlag, err)
	}
	flags.StringSliceVar(&p.Guilds, serversFlag, []string{}, "Discord servers IDs to scan [required]")
	err = discordCmd.MarkFlagRequired(serversFlag)
	if err != nil {
		return nil, fmt.Errorf("error while marking '%s' flag as required: %w", serversFlag, err)
	}
	flags.StringSliceVar(&p.Channels, channelsFlag, []string{}, "Discord channels IDs to scan. If not provided, all channels will be scanned")
	flags.DurationVar(&p.BackwardDuration, fromDateFlag, defaultDateFrom, "The time interval to scan from the current time. For example, 24h for 24 hours or 336h0m0s for 14 days.")
	flags.IntVar(&p.Count, messagesCountFlag, 0, "The number of messages to scan. If not provided, all messages will be scanned until the fromDate flag value.")

	discordCmd.Run = func(cmd *cobra.Command, args []string) {
		err := p.initialize()
		if err != nil {
			errors <- fmt.Errorf("discord plugin initialization failed: %w", err)
			return
		}

		// getItems registers every reader goroutine on wg before returning,
		// so Wait blocks until scanning completes and items can be closed.
		wg := &sync.WaitGroup{}
		p.getItems(items, errors, wg)
		wg.Wait()
		close(items)
	}

	return discordCmd, nil
}
+
+func (p *DiscordPlugin) initialize() error {
+ if len(p.Channels) == 0 {
+ log.Warn().Msg("discord channels not provided. Will scan all channels")
+ }
+
+ if p.Count == 0 && p.BackwardDuration == 0 {
+ return fmt.Errorf("discord messages count or from date arg is missing. Plugin initialization failed")
+ }
+
+ return nil
+}
+
// getItems opens the Discord session, resolves the configured guilds, and
// launches one goroutine per guild to read its messages. Items and errors
// flow through the supplied channels; completion is tracked on wg.
func (p *DiscordPlugin) getItems(itemsChan chan ISourceItem, errChan chan error, wg *sync.WaitGroup) {
	p.errChan = errChan
	p.itemChan = itemsChan
	p.waitGroup = wg

	err := p.getDiscordReady()
	if err != nil {
		errChan <- err
		return
	}

	guilds := p.getGuildsByNameOrIDs()
	log.Info().Msgf("Found %d guilds", len(guilds))

	// Add before launching so a caller's Wait cannot return early.
	p.waitGroup.Add(len(guilds))
	for _, guild := range guilds {
		go p.readGuildMessages(guild)
	}
}
+
+func (p *DiscordPlugin) getDiscordReady() (err error) {
+ p.Session, err = discordgo.New(p.Token)
+ if err != nil {
+ return err
+ }
+
+ p.Session.StateEnabled = true
+ ready := make(chan error)
+ p.Session.AddHandlerOnce(func(s *discordgo.Session, r *discordgo.Ready) {
+ ready <- nil
+ })
+ go func() {
+ err := p.Session.Open()
+ if err != nil {
+ ready <- err
+ }
+ }()
+ time.AfterFunc(time.Second*10, func() {
+ ready <- fmt.Errorf("discord session timeout")
+ })
+
+ err = <-ready
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (p *DiscordPlugin) getGuildsByNameOrIDs() []*discordgo.Guild {
+ var result []*discordgo.Guild
+
+ for _, guild := range p.Guilds {
+ for _, g := range p.Session.State.Guilds {
+ if g.Name == guild || g.ID == guild {
+ result = append(result, g)
+ }
+ }
+ }
+
+ return result
+}
+
// readGuildMessages fans out one goroutine per selected channel in the
// guild. It consumes one waitGroup slot itself (added by getItems) and
// registers the channel readers on the same group before returning, so the
// group never drops to zero between guild and channel processing.
func (p *DiscordPlugin) readGuildMessages(guild *discordgo.Guild) {
	defer p.waitGroup.Done()

	guildLogger := log.With().Str("guild", guild.Name).Logger()
	guildLogger.Debug().Send()

	selectedChannels := p.getChannelsByNameOrIDs(guild)
	guildLogger.Info().Msgf("Found %d channels", len(selectedChannels))

	p.waitGroup.Add(len(selectedChannels))
	for _, channel := range selectedChannels {
		go p.readChannelMessages(channel)
	}
}
+
+func (p *DiscordPlugin) getChannelsByNameOrIDs(guild *discordgo.Guild) []*discordgo.Channel {
+ var result []*discordgo.Channel
+ if len(p.Channels) == 0 {
+ return guild.Channels
+ }
+
+ for _, channel := range p.Channels {
+ for _, c := range guild.Channels {
+ if c.Name == channel || c.ID == channel {
+ result = append(result, c)
+ }
+ }
+ }
+
+ return result
+}
+
+func (p *DiscordPlugin) readChannelMessages(channel *discordgo.Channel) {
+ defer p.waitGroup.Done()
+
+ channelLogger := log.With().Str("guildID", channel.GuildID).Str("channel", channel.Name).Logger()
+ channelLogger.Debug().Send()
+
+ permission, err := p.Session.UserChannelPermissions(p.Session.State.User.ID, channel.ID)
+ if err != nil {
+ if err, ok := err.(*discordgo.RESTError); ok {
+ if err.Message.Code == 50001 {
+ channelLogger.Debug().Msg("No read permissions")
+ return
+ }
+ }
+
+ channelLogger.Error().Err(err).Msg("Failed to get permissions")
+ p.errChan <- err
+ return
+ }
+ if permission&discordgo.PermissionViewChannel == 0 {
+ channelLogger.Debug().Msg("No read permissions")
+ return
+ }
+ if channel.Type != discordgo.ChannelTypeGuildText {
+ channelLogger.Debug().Msg("Not a text channel")
+ return
+ }
+
+ messages, err := p.getMessages(channel.ID, channelLogger)
+ if err != nil {
+ channelLogger.Error().Err(err).Msg("Failed to get messages")
+ p.errChan <- err
+ return
+ }
+ channelLogger.Info().Msgf("Found %d messages", len(messages))
+
+ items := convertMessagesToItems(p.GetName(), channel.GuildID, &messages)
+ for _, item := range *items {
+ p.itemChan <- item
+ }
+}
+
// getMessages fetches the channel's messages newest-first, 100 per request,
// stopping once BackwardDuration is exceeded or Count messages have been
// collected (whichever limit is configured and hit first). Thread messages
// are fetched recursively and appended after the channel's own messages.
//
// NOTE(review): the Count limit is checked against the top-level messages
// slice only; recursively collected thread messages are not counted toward
// it — confirm this is intentional.
func (p *DiscordPlugin) getMessages(channelID string, logger zerolog.Logger) ([]*discordgo.Message, error) {
	var messages []*discordgo.Message
	threadMessages := []*discordgo.Message{}

	// Empty beforeID means "start from the most recent message".
	var beforeID string

	m, err := p.Session.ChannelMessages(channelID, 100, beforeID, "", "")
	if err != nil {
		return nil, err
	}

	lastMessage := false
	for len(m) > 0 && !lastMessage {

		for _, message := range m {

			// Messages arrive newest-first, so the first one older than the
			// window terminates the whole scan.
			timeSince := time.Since(message.Timestamp)
			if p.BackwardDuration > 0 && timeSince > p.BackwardDuration {
				logger.Debug().Msgf("Reached time limit (%s). Last message is %s old", p.BackwardDuration.String(), timeSince.Round(time.Hour).String())
				lastMessage = true
				break
			}

			if p.Count > 0 && len(messages) == p.Count {
				logger.Debug().Msgf("Reached message count (%d)", p.Count)
				lastMessage = true
				break
			}

			if message.Thread != nil {
				logger.Info().Msgf("Found thread %s", message.Thread.Name)
				tMgs, err := p.getMessages(message.Thread.ID, logger.With().Str("thread", message.Thread.Name).Logger())
				if err != nil {
					return nil, err
				}
				threadMessages = append(threadMessages, tMgs...)
			}

			messages = append(messages, message)
			// Paginate backwards from the oldest message seen so far.
			beforeID = message.ID
		}

		m, err = p.Session.ChannelMessages(channelID, 100, beforeID, "", "")
		if err != nil {
			return nil, err
		}
	}

	return append(messages, threadMessages...), nil
}
+
+func convertMessagesToItems(pluginName, guildId string, messages *[]*discordgo.Message) *[]ISourceItem {
+ items := []ISourceItem{}
+ for _, message := range *messages {
+ items = append(items, item{
+ Content: &message.Content,
+ ID: fmt.Sprintf("%s-%s-%s-%s", pluginName, guildId, message.ChannelID, message.ID),
+ Source: fmt.Sprintf("https://discord.com/channels/%s/%s/%s", guildId, message.ChannelID, message.ID),
+ })
+ }
+ return &items
+}
diff --git a/plugins/discord_test.go b/plugins/discord_test.go
index 7513c0bb..6145ecde 100644
--- a/plugins/discord_test.go
+++ b/plugins/discord_test.go
@@ -1,453 +1,453 @@
-package plugins
-
-import (
- "bytes"
- "github.com/bwmarrin/discordgo"
- "github.com/rs/zerolog"
- "github.com/rs/zerolog/log"
- "github.com/stretchr/testify/assert"
- "testing"
-)
-
-func TestInitializeDiscord(t *testing.T) {
- tests := []struct {
- name string
- plugin DiscordPlugin
- expectedError string
- expectedLogMsg string
- }{
- {
- name: "Channels provided",
- plugin: DiscordPlugin{
- Channels: []string{"mockChannel1", "mockChannel2"},
- Count: 10,
- BackwardDuration: 5,
- },
- expectedError: "",
- expectedLogMsg: "",
- },
- {
- name: "Channels not provided",
- plugin: DiscordPlugin{
- Channels: []string{},
- Count: 10,
- BackwardDuration: 5,
- },
- expectedError: "",
- expectedLogMsg: "discord channels not provided. Will scan all channels",
- },
- {
- name: "Count and BackwardDuration both zero",
- plugin: DiscordPlugin{
- Channels: []string{"channel1"},
- Count: 0,
- BackwardDuration: 0,
- },
- expectedError: "discord messages count or from date arg is missing. Plugin initialization failed",
- expectedLogMsg: "",
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- var logBuf bytes.Buffer
- log.Logger = zerolog.New(&logBuf)
-
- err := tt.plugin.initialize()
-
- if tt.expectedError != "" {
- assert.Error(t, err)
- assert.Equal(t, tt.expectedError, err.Error())
- } else {
- assert.NoError(t, err)
- }
-
- logOutput := logBuf.String()
- if tt.expectedLogMsg != "" {
- assert.Contains(t, logOutput, tt.expectedLogMsg)
- }
- })
- }
-}
-
-func TestGetGuildsByNameOrIDs(t *testing.T) {
- tests := []struct {
- name string
- plugin DiscordPlugin
- expected []*discordgo.Guild
- }{
- {
- name: "Match only by Name",
- plugin: DiscordPlugin{
- Guilds: []string{"mockGuild1", "mockGuild2"},
- Session: &discordgo.Session{
- State: &discordgo.State{
- Ready: discordgo.Ready{
- Guilds: []*discordgo.Guild{
- {
- Name: "mockGuild0",
- ID: "123456789012345670",
- },
- {
- Name: "mockGuild1",
- ID: "123456789012345671",
- },
- {
- Name: "mockGuild2",
- ID: "123456789012345672",
- },
- {
- Name: "mockGuild4",
- ID: "123456789012345673",
- },
- },
- },
- },
- },
- },
- expected: []*discordgo.Guild{
- {
- Name: "mockGuild1",
- ID: "123456789012345671",
- },
- {
- Name: "mockGuild2",
- ID: "123456789012345672",
- },
- },
- },
- {
- name: "Match only by ID",
- plugin: DiscordPlugin{
- Guilds: []string{"123456789012345671", "123456789012345672"},
- Session: &discordgo.Session{
- State: &discordgo.State{
- Ready: discordgo.Ready{
- Guilds: []*discordgo.Guild{
- {
- Name: "mockGuild0",
- ID: "123456789012345670",
- },
- {
- Name: "mockGuild1",
- ID: "123456789012345671",
- },
- {
- Name: "mockGuild2",
- ID: "123456789012345672",
- },
- {
- Name: "mockGuild4",
- ID: "123456789012345673",
- },
- },
- },
- },
- },
- },
- expected: []*discordgo.Guild{
- {
- Name: "mockGuild1",
- ID: "123456789012345671",
- },
- {
- Name: "mockGuild2",
- ID: "123456789012345672",
- },
- },
- },
- {
- name: "Match by ID and name",
- plugin: DiscordPlugin{
- Guilds: []string{"mockGuild1", "123456789012345672"},
- Session: &discordgo.Session{
- State: &discordgo.State{
- Ready: discordgo.Ready{
- Guilds: []*discordgo.Guild{
- {
- Name: "mockGuild0",
- ID: "123456789012345670",
- },
- {
- Name: "mockGuild1",
- ID: "123456789012345671",
- },
- {
- Name: "mockGuild2",
- ID: "123456789012345672",
- },
- {
- Name: "mockGuild4",
- ID: "123456789012345673",
- },
- },
- },
- },
- },
- },
- expected: []*discordgo.Guild{
- {
- Name: "mockGuild1",
- ID: "123456789012345671",
- },
- {
- Name: "mockGuild2",
- ID: "123456789012345672",
- },
- },
- },
- {
- name: "No match",
- plugin: DiscordPlugin{
- Guilds: []string{"mockGuild5", "123456789012345679"},
- Session: &discordgo.Session{
- State: &discordgo.State{
- Ready: discordgo.Ready{
- Guilds: []*discordgo.Guild{
- {
- Name: "mockGuild0",
- ID: "123456789012345670",
- },
- {
- Name: "mockGuild4",
- ID: "123456789012345673",
- },
- },
- },
- },
- },
- },
- expected: nil,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- result := tt.plugin.getGuildsByNameOrIDs()
- assert.Equal(t, tt.expected, result)
- })
- }
-}
-
-func TestGetChannelsByNameOrIDs(t *testing.T) {
- tests := []struct {
- name string
- plugin DiscordPlugin
- guild *discordgo.Guild
- expected []*discordgo.Channel
- }{
- {
- name: "No Channels filtered",
- plugin: DiscordPlugin{
- Channels: []string{},
- },
- guild: &discordgo.Guild{
- Channels: []*discordgo.Channel{
- {
- Name: "mockChannel0",
- ID: "123456789012345670",
- },
- {
- Name: "mockChannel1",
- ID: "123456789012345671",
- },
- },
- },
- expected: []*discordgo.Channel{
- {
- Name: "mockChannel0",
- ID: "123456789012345670",
- },
- {
- Name: "mockChannel1",
- ID: "123456789012345671",
- },
- },
- },
- {
- name: "Match only by Channel Name",
- plugin: DiscordPlugin{
- Channels: []string{"mockChannel1", "mockChannel2"},
- },
- guild: &discordgo.Guild{
- Channels: []*discordgo.Channel{
- {
- Name: "mockChannel0",
- ID: "123456789012345670",
- },
- {
- Name: "mockChannel1",
- ID: "123456789012345671",
- },
- {
- Name: "mockChannel2",
- ID: "123456789012345672",
- },
- {
- Name: "mockChannel3",
- ID: "123456789012345673",
- },
- },
- },
- expected: []*discordgo.Channel{
- {
- Name: "mockChannel1",
- ID: "123456789012345671",
- },
- {
- Name: "mockChannel2",
- ID: "123456789012345672",
- },
- },
- },
- {
- name: "Match only by Channel ID",
- plugin: DiscordPlugin{
- Channels: []string{"123456789012345671", "123456789012345672"},
- },
- guild: &discordgo.Guild{
- Channels: []*discordgo.Channel{
- {
- Name: "mockChannel0",
- ID: "123456789012345670",
- },
- {
- Name: "mockChannel1",
- ID: "123456789012345671",
- },
- {
- Name: "mockChannel2",
- ID: "123456789012345672",
- },
- {
- Name: "mockChannel3",
- ID: "123456789012345673",
- },
- },
- },
- expected: []*discordgo.Channel{
- {
- Name: "mockChannel1",
- ID: "123456789012345671",
- },
- {
- Name: "mockChannel2",
- ID: "123456789012345672",
- },
- },
- },
- {
- name: "Match only by Name and Channel ID",
- plugin: DiscordPlugin{
- Channels: []string{"123456789012345671", "mockChannel2"},
- },
- guild: &discordgo.Guild{
- Channels: []*discordgo.Channel{
- {
- Name: "mockChannel0",
- ID: "123456789012345670",
- },
- {
- Name: "mockChannel1",
- ID: "123456789012345671",
- },
- {
- Name: "mockChannel2",
- ID: "123456789012345672",
- },
- {
- Name: "mockChannel3",
- ID: "123456789012345673",
- },
- },
- },
- expected: []*discordgo.Channel{
- {
- Name: "mockChannel1",
- ID: "123456789012345671",
- },
- {
- Name: "mockChannel2",
- ID: "123456789012345672",
- },
- },
- },
- {
- name: "No Match",
- plugin: DiscordPlugin{
- Channels: []string{"mockChannel5"},
- },
- guild: &discordgo.Guild{
- Channels: []*discordgo.Channel{
- {
- Name: "mockChannel0",
- ID: "123456789012345670",
- },
- {
- Name: "mockChannel1",
- ID: "123456789012345671",
- },
- },
- },
- expected: nil,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- result := tt.plugin.getChannelsByNameOrIDs(tt.guild)
- assert.Equal(t, tt.expected, result)
- })
- }
-}
-
-func TestConvertMessagesToItems(t *testing.T) {
- tests := []struct {
- name string
- pluginName string
- guildId string
- messages []*discordgo.Message
- want []ISourceItem
- }{
- {
- name: "Multiple messages",
- pluginName: "TestPlugin",
- guildId: "12345",
- messages: []*discordgo.Message{
- {
- ID: "67890",
- ChannelID: "112233",
- Content: "mock content 1",
- },
- {
- ID: "67891",
- ChannelID: "112234",
- Content: "mock content 2",
- },
- },
- want: []ISourceItem{
- item{
- Content: ptr("mock content 1"),
- ID: "TestPlugin-12345-112233-67890",
- Source: "https://discord.com/channels/12345/112233/67890",
- },
- item{
- Content: ptr("mock content 2"),
- ID: "TestPlugin-12345-112234-67891",
- Source: "https://discord.com/channels/12345/112234/67891",
- },
- },
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- got := convertMessagesToItems(tt.pluginName, tt.guildId, &tt.messages)
- assert.Equal(t, &tt.want, got)
- })
- }
-}
-
-func ptr(s string) *string {
- return &s
-}
+package plugins
+
+import (
+ "bytes"
+ "github.com/bwmarrin/discordgo"
+ "github.com/rs/zerolog"
+ "github.com/rs/zerolog/log"
+ "github.com/stretchr/testify/assert"
+ "testing"
+)
+
// TestInitializeDiscord verifies DiscordPlugin.initialize: it must fail when
// neither a message count nor a look-back duration is configured, and must
// log a notice when no channels are specified.
func TestInitializeDiscord(t *testing.T) {
	tests := []struct {
		name           string
		plugin         DiscordPlugin
		expectedError  string // exact error message expected from initialize, "" for success
		expectedLogMsg string // substring expected in the captured log output, "" to skip
	}{
		{
			name: "Channels provided",
			plugin: DiscordPlugin{
				Channels:         []string{"mockChannel1", "mockChannel2"},
				Count:            10,
				BackwardDuration: 5,
			},
			expectedError:  "",
			expectedLogMsg: "",
		},
		{
			name: "Channels not provided",
			plugin: DiscordPlugin{
				Channels:         []string{},
				Count:            10,
				BackwardDuration: 5,
			},
			expectedError:  "",
			expectedLogMsg: "discord channels not provided. Will scan all channels",
		},
		{
			name: "Count and BackwardDuration both zero",
			plugin: DiscordPlugin{
				Channels:         []string{"channel1"},
				Count:            0,
				BackwardDuration: 0,
			},
			expectedError:  "discord messages count or from date arg is missing. Plugin initialization failed",
			expectedLogMsg: "",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Redirect the global logger into a buffer so log output can be asserted.
			var logBuf bytes.Buffer
			log.Logger = zerolog.New(&logBuf)

			err := tt.plugin.initialize()

			if tt.expectedError != "" {
				assert.Error(t, err)
				assert.Equal(t, tt.expectedError, err.Error())
			} else {
				assert.NoError(t, err)
			}

			logOutput := logBuf.String()
			if tt.expectedLogMsg != "" {
				assert.Contains(t, logOutput, tt.expectedLogMsg)
			}
		})
	}
}
+
+func TestGetGuildsByNameOrIDs(t *testing.T) {
+ tests := []struct {
+ name string
+ plugin DiscordPlugin
+ expected []*discordgo.Guild
+ }{
+ {
+ name: "Match only by Name",
+ plugin: DiscordPlugin{
+ Guilds: []string{"mockGuild1", "mockGuild2"},
+ Session: &discordgo.Session{
+ State: &discordgo.State{
+ Ready: discordgo.Ready{
+ Guilds: []*discordgo.Guild{
+ {
+ Name: "mockGuild0",
+ ID: "123456789012345670",
+ },
+ {
+ Name: "mockGuild1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockGuild2",
+ ID: "123456789012345672",
+ },
+ {
+ Name: "mockGuild4",
+ ID: "123456789012345673",
+ },
+ },
+ },
+ },
+ },
+ },
+ expected: []*discordgo.Guild{
+ {
+ Name: "mockGuild1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockGuild2",
+ ID: "123456789012345672",
+ },
+ },
+ },
+ {
+ name: "Match only by ID",
+ plugin: DiscordPlugin{
+ Guilds: []string{"123456789012345671", "123456789012345672"},
+ Session: &discordgo.Session{
+ State: &discordgo.State{
+ Ready: discordgo.Ready{
+ Guilds: []*discordgo.Guild{
+ {
+ Name: "mockGuild0",
+ ID: "123456789012345670",
+ },
+ {
+ Name: "mockGuild1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockGuild2",
+ ID: "123456789012345672",
+ },
+ {
+ Name: "mockGuild4",
+ ID: "123456789012345673",
+ },
+ },
+ },
+ },
+ },
+ },
+ expected: []*discordgo.Guild{
+ {
+ Name: "mockGuild1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockGuild2",
+ ID: "123456789012345672",
+ },
+ },
+ },
+ {
+ name: "Match by ID and name",
+ plugin: DiscordPlugin{
+ Guilds: []string{"mockGuild1", "123456789012345672"},
+ Session: &discordgo.Session{
+ State: &discordgo.State{
+ Ready: discordgo.Ready{
+ Guilds: []*discordgo.Guild{
+ {
+ Name: "mockGuild0",
+ ID: "123456789012345670",
+ },
+ {
+ Name: "mockGuild1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockGuild2",
+ ID: "123456789012345672",
+ },
+ {
+ Name: "mockGuild4",
+ ID: "123456789012345673",
+ },
+ },
+ },
+ },
+ },
+ },
+ expected: []*discordgo.Guild{
+ {
+ Name: "mockGuild1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockGuild2",
+ ID: "123456789012345672",
+ },
+ },
+ },
+ {
+ name: "No match",
+ plugin: DiscordPlugin{
+ Guilds: []string{"mockGuild5", "123456789012345679"},
+ Session: &discordgo.Session{
+ State: &discordgo.State{
+ Ready: discordgo.Ready{
+ Guilds: []*discordgo.Guild{
+ {
+ Name: "mockGuild0",
+ ID: "123456789012345670",
+ },
+ {
+ Name: "mockGuild4",
+ ID: "123456789012345673",
+ },
+ },
+ },
+ },
+ },
+ },
+ expected: nil,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := tt.plugin.getGuildsByNameOrIDs()
+ assert.Equal(t, tt.expected, result)
+ })
+ }
+}
+
+func TestGetChannelsByNameOrIDs(t *testing.T) {
+ tests := []struct {
+ name string
+ plugin DiscordPlugin
+ guild *discordgo.Guild
+ expected []*discordgo.Channel
+ }{
+ {
+ name: "No Channels filtered",
+ plugin: DiscordPlugin{
+ Channels: []string{},
+ },
+ guild: &discordgo.Guild{
+ Channels: []*discordgo.Channel{
+ {
+ Name: "mockChannel0",
+ ID: "123456789012345670",
+ },
+ {
+ Name: "mockChannel1",
+ ID: "123456789012345671",
+ },
+ },
+ },
+ expected: []*discordgo.Channel{
+ {
+ Name: "mockChannel0",
+ ID: "123456789012345670",
+ },
+ {
+ Name: "mockChannel1",
+ ID: "123456789012345671",
+ },
+ },
+ },
+ {
+ name: "Match only by Channel Name",
+ plugin: DiscordPlugin{
+ Channels: []string{"mockChannel1", "mockChannel2"},
+ },
+ guild: &discordgo.Guild{
+ Channels: []*discordgo.Channel{
+ {
+ Name: "mockChannel0",
+ ID: "123456789012345670",
+ },
+ {
+ Name: "mockChannel1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockChannel2",
+ ID: "123456789012345672",
+ },
+ {
+ Name: "mockChannel3",
+ ID: "123456789012345673",
+ },
+ },
+ },
+ expected: []*discordgo.Channel{
+ {
+ Name: "mockChannel1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockChannel2",
+ ID: "123456789012345672",
+ },
+ },
+ },
+ {
+ name: "Match only by Channel ID",
+ plugin: DiscordPlugin{
+ Channels: []string{"123456789012345671", "123456789012345672"},
+ },
+ guild: &discordgo.Guild{
+ Channels: []*discordgo.Channel{
+ {
+ Name: "mockChannel0",
+ ID: "123456789012345670",
+ },
+ {
+ Name: "mockChannel1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockChannel2",
+ ID: "123456789012345672",
+ },
+ {
+ Name: "mockChannel3",
+ ID: "123456789012345673",
+ },
+ },
+ },
+ expected: []*discordgo.Channel{
+ {
+ Name: "mockChannel1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockChannel2",
+ ID: "123456789012345672",
+ },
+ },
+ },
+ {
+ name: "Match only by Name and Channel ID",
+ plugin: DiscordPlugin{
+ Channels: []string{"123456789012345671", "mockChannel2"},
+ },
+ guild: &discordgo.Guild{
+ Channels: []*discordgo.Channel{
+ {
+ Name: "mockChannel0",
+ ID: "123456789012345670",
+ },
+ {
+ Name: "mockChannel1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockChannel2",
+ ID: "123456789012345672",
+ },
+ {
+ Name: "mockChannel3",
+ ID: "123456789012345673",
+ },
+ },
+ },
+ expected: []*discordgo.Channel{
+ {
+ Name: "mockChannel1",
+ ID: "123456789012345671",
+ },
+ {
+ Name: "mockChannel2",
+ ID: "123456789012345672",
+ },
+ },
+ },
+ {
+ name: "No Match",
+ plugin: DiscordPlugin{
+ Channels: []string{"mockChannel5"},
+ },
+ guild: &discordgo.Guild{
+ Channels: []*discordgo.Channel{
+ {
+ Name: "mockChannel0",
+ ID: "123456789012345670",
+ },
+ {
+ Name: "mockChannel1",
+ ID: "123456789012345671",
+ },
+ },
+ },
+ expected: nil,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := tt.plugin.getChannelsByNameOrIDs(tt.guild)
+ assert.Equal(t, tt.expected, result)
+ })
+ }
+}
+
// TestConvertMessagesToItems checks that Discord messages are mapped to
// source items with the expected composite IDs and permalink Sources.
func TestConvertMessagesToItems(t *testing.T) {
	tests := []struct {
		name       string
		pluginName string
		guildId    string
		messages   []*discordgo.Message
		want       []ISourceItem
	}{
		{
			name:       "Multiple messages",
			pluginName: "TestPlugin",
			guildId:    "12345",
			messages: []*discordgo.Message{
				{
					ID:        "67890",
					ChannelID: "112233",
					Content:   "mock content 1",
				},
				{
					ID:        "67891",
					ChannelID: "112234",
					Content:   "mock content 2",
				},
			},
			want: []ISourceItem{
				item{
					Content: ptr("mock content 1"),
					ID:      "TestPlugin-12345-112233-67890",
					Source:  "https://discord.com/channels/12345/112233/67890",
				},
				item{
					Content: ptr("mock content 2"),
					ID:      "TestPlugin-12345-112234-67891",
					Source:  "https://discord.com/channels/12345/112234/67891",
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := convertMessagesToItems(tt.pluginName, tt.guildId, &tt.messages)
			assert.Equal(t, &tt.want, got)
		})
	}
}
+
// ptr returns a pointer to a copy of s; test helper for *string fields.
func ptr(s string) *string {
	value := s
	return &value
}
diff --git a/plugins/filesystem.go b/plugins/filesystem.go
index fc4138ec..f349f503 100644
--- a/plugins/filesystem.go
+++ b/plugins/filesystem.go
@@ -4,11 +4,11 @@ import (
"fmt"
"os"
"path/filepath"
- "sync"
"time"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
+ "golang.org/x/sync/errgroup"
)
const (
@@ -38,9 +38,7 @@ func (p *FileSystemPlugin) DefineCommand(items chan ISourceItem, errors chan err
Run: func(cmd *cobra.Command, args []string) {
log.Info().Msg("Folder plugin started")
- wg := &sync.WaitGroup{}
- p.getFiles(items, errors, wg)
- wg.Wait()
+ p.getFiles(items, errors)
close(items)
},
}
@@ -60,7 +58,7 @@ func (p *FileSystemPlugin) DefineCommand(items chan ISourceItem, errors chan err
return cmd, nil
}
-func (p *FileSystemPlugin) getFiles(items chan ISourceItem, errs chan error, wg *sync.WaitGroup) {
+func (p *FileSystemPlugin) getFiles(items chan ISourceItem, errs chan error) {
fileList := make([]string, 0)
err := filepath.Walk(p.Path, func(path string, fInfo os.FileInfo, err error) error {
if err != nil {
@@ -98,23 +96,25 @@ func (p *FileSystemPlugin) getFiles(items chan ISourceItem, errs chan error, wg
return
}
- p.getItems(items, errs, wg, fileList)
+ p.getItems(items, errs, fileList)
}
-func (p *FileSystemPlugin) getItems(items chan ISourceItem, errs chan error, wg *sync.WaitGroup, fileList []string) {
+func (p *FileSystemPlugin) getItems(items chan ISourceItem, errs chan error, fileList []string) {
+ g := errgroup.Group{}
+ g.SetLimit(1000)
for _, filePath := range fileList {
- wg.Add(1)
- go func(filePath string) {
- defer wg.Done()
+ g.Go(func() error {
actualFile, err := p.getItem(filePath)
if err != nil {
errs <- err
time.Sleep(time.Second) // Temporary fix for incorrect non-error exits; needs a better solution.
- return
+ return nil
}
items <- *actualFile
- }(filePath)
+ return nil
+ })
}
+ g.Wait()
}
func (p *FileSystemPlugin) getItem(filePath string) (*item, error) {
diff --git a/plugins/filesystem_test.go b/plugins/filesystem_test.go
index 2f631113..cdd063ce 100644
--- a/plugins/filesystem_test.go
+++ b/plugins/filesystem_test.go
@@ -2,11 +2,12 @@ package plugins
import (
"fmt"
- "github.com/stretchr/testify/assert"
"os"
"path/filepath"
"sync"
"testing"
+
+ "github.com/stretchr/testify/assert"
)
func TestGetItem(t *testing.T) {
@@ -58,15 +59,12 @@ func TestGetItems(t *testing.T) {
itemsChan := make(chan ISourceItem, len(fileList))
errsChan := make(chan error, len(fileList))
- var wg sync.WaitGroup
plugin := &FileSystemPlugin{
ProjectName: "TestProject",
}
- plugin.getItems(itemsChan, errsChan, &wg, fileList)
-
- wg.Wait()
+ plugin.getItems(itemsChan, errsChan, fileList)
close(itemsChan)
close(errsChan)
diff --git a/plugins/paligo.go b/plugins/paligo.go
index 1849aca0..4215b33a 100644
--- a/plugins/paligo.go
+++ b/plugins/paligo.go
@@ -1,352 +1,352 @@
-package plugins
-
-import (
- "context"
- "encoding/json"
- "fmt"
- "net/http"
- "strconv"
- "strings"
- "sync"
- "time"
-
- "github.com/checkmarx/2ms/lib/utils"
- "github.com/rs/zerolog/log"
- "github.com/spf13/cobra"
- "golang.org/x/time/rate"
-)
-
-const (
- paligoInstanceFlag = "instance"
- paligoUsernameFlag = "username"
- paligoTokenFlag = "token"
- paligoAuthFlag = "auth"
- paligoFolderFlag = "folder"
-)
-
-var (
- paligoInstanceArg string
- paligoFolderArg int
-)
-
-type PaligoPlugin struct {
- Plugin
- Channels
-
- username string
- token string
- auth string
-
- paligoApi *PaligoClient
-}
-
-func (p *PaligoPlugin) GetCredentials() (string, string) {
- return p.username, p.token
-}
-
-func (p *PaligoPlugin) GetAuthorizationHeader() string {
- if p.auth != "" {
- return fmt.Sprintf("Basic %s", p.auth)
- }
- return utils.CreateBasicAuthCredentials(p)
-}
-
-func (p *PaligoPlugin) GetName() string {
- return "paligo"
-}
-
-func (p *PaligoPlugin) DefineCommand(items chan ISourceItem, errors chan error) (*cobra.Command, error) {
- p.Channels = Channels{
- Items: items,
- Errors: errors,
- WaitGroup: &sync.WaitGroup{},
- }
-
- command := &cobra.Command{
- Use: fmt.Sprintf("%s --%s %s --%s %s --%s %s",
- p.GetName(),
- paligoInstanceFlag, strings.ToUpper(paligoInstanceFlag),
- paligoUsernameFlag, strings.ToUpper(paligoUsernameFlag),
- paligoTokenFlag, strings.ToUpper(paligoTokenFlag)),
- Short: "Scan Paligo instance",
- Long: "Scan Paligo instance for sensitive information.",
- Run: func(cmd *cobra.Command, args []string) {
- // Waits for MarkFlagsOneRequired https://github.com/spf13/cobra/pull/1952
- if p.auth == "" && (p.username == "" || p.token == "") {
- p.Channels.Errors <- fmt.Errorf("exactly one of the flags in the group %v must be set; none were set", []string{paligoAuthFlag, paligoUsernameFlag, paligoTokenFlag})
- return
- }
- log.Info().Msg("Paligo plugin started")
- p.getItems()
- p.WaitGroup.Wait()
- close(items)
- },
- }
-
- command.Flags().StringVar(&paligoInstanceArg, paligoInstanceFlag, "", "Paligo instance name [required]")
- err := command.MarkFlagRequired(paligoInstanceFlag)
- if err != nil {
- return nil, fmt.Errorf("error while marking flag %s as required: %w", paligoInstanceFlag, err)
- }
-
- command.Flags().StringVar(&p.username, paligoUsernameFlag, "", "Paligo username")
- command.Flags().StringVar(&p.token, paligoTokenFlag, "", "Paligo token")
- command.MarkFlagsRequiredTogether(paligoUsernameFlag, paligoTokenFlag)
-
- command.Flags().StringVar(&p.auth, paligoAuthFlag, "", "Paligo encoded username:password")
- command.MarkFlagsMutuallyExclusive(paligoUsernameFlag, paligoAuthFlag)
- command.MarkFlagsMutuallyExclusive(paligoTokenFlag, paligoAuthFlag)
-
- command.Flags().IntVar(&paligoFolderArg, paligoFolderFlag, 0, "Paligo folder ID. If not specified, the whole instance will be scanned")
-
- return command, nil
-}
-
-func (p *PaligoPlugin) getItems() {
- p.paligoApi = newPaligoApi(paligoInstanceArg, p)
-
- foldersToProcess, err := p.getFirstProcessingFolders()
- if err != nil {
- p.Channels.Errors <- err
- return
- }
-
- itemsChan := p.processFolders(foldersToProcess)
-
- p.WaitGroup.Add(1)
- go func() {
- defer p.WaitGroup.Done()
- for item := range itemsChan {
- p.handleComponent(item)
- }
- }()
-}
-
-func (p *PaligoPlugin) getFirstProcessingFolders() ([]PaligoItem, error) {
- foldersToProcess := []PaligoItem{}
-
- if paligoFolderArg != 0 {
- foldersToProcess = append(foldersToProcess, PaligoItem{ID: paligoFolderArg, Name: "ID" + fmt.Sprint(paligoFolderArg)})
- } else {
- folders, err := p.paligoApi.listFolders()
- if err != nil {
- log.Error().Err(err).Msg("error while getting root folders")
- return nil, fmt.Errorf("error while getting root folders: %w", err)
- }
- for _, folder := range *folders {
- foldersToProcess = append(foldersToProcess, folder.PaligoItem)
- }
- }
- return foldersToProcess, nil
-}
-
-func (p *PaligoPlugin) processFolders(foldersToProcess []PaligoItem) chan PaligoItem {
-
- itemsChan := make(chan PaligoItem)
-
- p.WaitGroup.Add(1)
- go func() {
- defer p.WaitGroup.Done()
-
- for len(foldersToProcess) > 0 {
- folder := foldersToProcess[0]
- foldersToProcess = foldersToProcess[1:]
-
- log.Info().Msgf("Getting folder %s", folder.Name)
- folderInfo, err := p.paligoApi.showFolder(folder.ID)
- if err != nil {
- log.Error().Err(err).Msgf("error while getting %s '%s'", folder.Type, folder.Name)
- p.Channels.Errors <- err
- continue
- }
-
- for _, child := range folderInfo.Children {
- if child.Type == "component" {
- itemsChan <- child
- } else if child.Type == "folder" {
- foldersToProcess = append(foldersToProcess, child)
- }
- }
- }
- close(itemsChan)
- }()
-
- return itemsChan
-}
-
-func (p *PaligoPlugin) handleComponent(paligoItem PaligoItem) {
-
- log.Info().Msgf("Getting component %s", paligoItem.Name)
- document, err := p.paligoApi.showDocument(paligoItem.ID)
- if err != nil {
- log.Error().Err(err).Msgf("error while getting document '%s'", paligoItem.Name)
- p.Channels.Errors <- fmt.Errorf("error while getting document '%s': %w", paligoItem.Name, err)
- return
- }
-
- url := fmt.Sprintf("https://%s.paligoapp.com/document/edit/%d", p.paligoApi.Instance, document.ID)
-
- p.Items <- item{
- Content: &document.Content,
- ID: fmt.Sprintf("%s-%s-%d", p.GetName(), p.paligoApi.Instance, document.ID),
- Source: url,
- }
-}
-
-/**
- * Paligo API
- */
-
-// https://paligo.net/docs/apidocs/en/index-en.html#UUID-a5b548af-9a37-d305-f5a8-11142d86fe20
-const (
- PALIGO_RATE_LIMIT_CHECK_INTERVAL = 5 * time.Second
- PALIGO_DOCUMENT_SHOW_LIMIT = 50
- PALIGO_FOLDER_SHOW_LIMIT = 50
-)
-
-func rateLimitPerSecond(rateLimit int) rate.Limit {
- return rate.Every(time.Minute / time.Duration(rateLimit))
-}
-
-type PaligoItem struct {
- ID int `json:"id"`
- Name string `json:"name"`
- UUID string `json:"uuid"`
- Type string `json:"type"`
-}
-
-type Folder struct {
- PaligoItem
- Children []PaligoItem `json:"children"`
-}
-
-type EmptyFolder struct {
- PaligoItem
- Children string `json:"children"`
-}
-
-type Component struct {
- PaligoItem
- Subtype string `json:"subtype"`
- Creator int `json:"creator"`
- Owner int `json:"owner"`
- Author int `json:"author"`
- CreatedAt int `json:"created_at"`
- ModifiedAt int `json:"modified_at"`
- Checkout bool `json:"checkout"`
- CheckoutUser string `json:"checkout_user"`
- ParentResource int `json:"parent_resource"`
- Taxonomies []interface{} `json:"taxonomies"`
- ReleaseStatus string `json:"release_status"`
- Content string `json:"content"`
- Languages []string `json:"languages"`
- External []interface{} `json:"external"`
- CustomAttributes []interface{} `json:"custom_attributes"`
-}
-
-type ListFoldersResponse struct {
- Page int `json:"page"`
- NextPage string `json:"next_page"`
- TotalPages int `json:"total_pages"`
- Folders []EmptyFolder `json:"folders"`
-}
-
-type Document struct {
- PaligoItem
- Content string `json:"content"`
- Languages []string `json:"languages"`
-}
-
-type PaligoClient struct {
- Instance string
- auth utils.IAuthorizationHeader
-
- foldersLimiter *rate.Limiter
- documentsLimiter *rate.Limiter
-}
-
-func reserveRateLimit(response *http.Response, lim *rate.Limiter, err error) error {
- if response.StatusCode != 429 {
- return err
- }
-
- rateLimit := response.Header.Get("Retry-After")
- if rateLimit == "" {
- return fmt.Errorf("Retry-After header not found")
- }
- seconds, err := strconv.Atoi(rateLimit)
- if err != nil {
- return fmt.Errorf("error parsing Retry-After header: %w", err)
- }
- log.Warn().Msgf("Rate limit exceeded, need to wait for %d seconds", seconds)
- lim.SetBurst(1)
- time.Sleep(time.Second * time.Duration(seconds))
- return nil
-}
-
-func (p *PaligoClient) request(endpoint string, lim *rate.Limiter) ([]byte, error) {
- if err := lim.Wait(context.Background()); err != nil {
- log.Error().Msgf("Error waiting for rate limiter: %s", err)
- return nil, err
- }
-
- url := fmt.Sprintf("https://%s.paligoapp.com/api/v2/%s", p.Instance, endpoint)
- body, response, err := utils.HttpRequest("GET", url, p.auth, utils.RetrySettings{})
- if err != nil {
- if err := reserveRateLimit(response, lim, err); err != nil {
- return nil, err
- }
- return p.request(endpoint, lim)
- }
-
- return body, nil
-}
-
-func (p *PaligoClient) listFolders() (*[]EmptyFolder, error) {
- body, err := p.request("folders", p.foldersLimiter)
- if err != nil {
- return nil, err
- }
-
- var folders *ListFoldersResponse
- err = json.Unmarshal(body, &folders)
- if err != nil {
- return nil, fmt.Errorf("error parsing folders response: %w", err)
- }
-
- return &folders.Folders, nil
-}
-
-func (p *PaligoClient) showFolder(folderId int) (*Folder, error) {
- body, err := p.request(fmt.Sprintf("folders/%d", folderId), p.foldersLimiter)
- if err != nil {
- return nil, err
- }
-
- folder := &Folder{}
- err = json.Unmarshal(body, folder)
-
- return folder, err
-}
-
-func (p *PaligoClient) showDocument(documentId int) (*Document, error) {
- body, err := p.request(fmt.Sprintf("documents/%d", documentId), p.documentsLimiter)
- if err != nil {
- return nil, err
- }
-
- document := &Document{}
- err = json.Unmarshal(body, document)
-
- return document, err
-}
-
-func newPaligoApi(instance string, auth utils.IAuthorizationHeader) *PaligoClient {
- return &PaligoClient{
- Instance: instance,
- auth: auth,
-
- foldersLimiter: rate.NewLimiter(rateLimitPerSecond(PALIGO_FOLDER_SHOW_LIMIT), PALIGO_FOLDER_SHOW_LIMIT),
- documentsLimiter: rate.NewLimiter(rateLimitPerSecond(PALIGO_DOCUMENT_SHOW_LIMIT), PALIGO_DOCUMENT_SHOW_LIMIT),
- }
-}
+package plugins
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/checkmarx/2ms/lib/utils"
+ "github.com/rs/zerolog/log"
+ "github.com/spf13/cobra"
+ "golang.org/x/time/rate"
+)
+
+const (
+ paligoInstanceFlag = "instance"
+ paligoUsernameFlag = "username"
+ paligoTokenFlag = "token"
+ paligoAuthFlag = "auth"
+ paligoFolderFlag = "folder"
+)
+
+var (
+ paligoInstanceArg string
+ paligoFolderArg int
+)
+
// PaligoPlugin scans a Paligo documentation instance for secrets via the
// Paligo REST API.
type PaligoPlugin struct {
	Plugin
	Channels

	// Credentials: either username+token, or a pre-encoded basic-auth
	// string in auth (the two forms are mutually exclusive flags).
	username string
	token    string
	auth     string

	paligoApi *PaligoClient // created lazily when the scan starts (getItems)
}
+
+func (p *PaligoPlugin) GetCredentials() (string, string) {
+ return p.username, p.token
+}
+
+func (p *PaligoPlugin) GetAuthorizationHeader() string {
+ if p.auth != "" {
+ return fmt.Sprintf("Basic %s", p.auth)
+ }
+ return utils.CreateBasicAuthCredentials(p)
+}
+
+func (p *PaligoPlugin) GetName() string {
+ return "paligo"
+}
+
// DefineCommand builds the "paligo" cobra subcommand: it registers the
// instance, credential, and folder flags and, on Run, validates that one
// auth form was supplied before starting the scan. Found items are sent on
// items (closed when the scan completes); failures on errors.
func (p *PaligoPlugin) DefineCommand(items chan ISourceItem, errors chan error) (*cobra.Command, error) {
	p.Channels = Channels{
		Items:     items,
		Errors:    errors,
		WaitGroup: &sync.WaitGroup{},
	}

	command := &cobra.Command{
		Use: fmt.Sprintf("%s --%s %s --%s %s --%s %s",
			p.GetName(),
			paligoInstanceFlag, strings.ToUpper(paligoInstanceFlag),
			paligoUsernameFlag, strings.ToUpper(paligoUsernameFlag),
			paligoTokenFlag, strings.ToUpper(paligoTokenFlag)),
		Short: "Scan Paligo instance",
		Long:  "Scan Paligo instance for sensitive information.",
		Run: func(cmd *cobra.Command, args []string) {
			// Manual check: either --auth, or both --username and --token.
			// Waits for MarkFlagsOneRequired https://github.com/spf13/cobra/pull/1952
			if p.auth == "" && (p.username == "" || p.token == "") {
				p.Channels.Errors <- fmt.Errorf("exactly one of the flags in the group %v must be set; none were set", []string{paligoAuthFlag, paligoUsernameFlag, paligoTokenFlag})
				return
			}
			log.Info().Msg("Paligo plugin started")
			p.getItems()
			// Block until producer/consumer goroutines finish before closing.
			p.WaitGroup.Wait()
			close(items)
		},
	}

	command.Flags().StringVar(&paligoInstanceArg, paligoInstanceFlag, "", "Paligo instance name [required]")
	err := command.MarkFlagRequired(paligoInstanceFlag)
	if err != nil {
		return nil, fmt.Errorf("error while marking flag %s as required: %w", paligoInstanceFlag, err)
	}

	command.Flags().StringVar(&p.username, paligoUsernameFlag, "", "Paligo username")
	command.Flags().StringVar(&p.token, paligoTokenFlag, "", "Paligo token")
	command.MarkFlagsRequiredTogether(paligoUsernameFlag, paligoTokenFlag)

	// --auth is an alternative to --username/--token, never combined with them.
	command.Flags().StringVar(&p.auth, paligoAuthFlag, "", "Paligo encoded username:password")
	command.MarkFlagsMutuallyExclusive(paligoUsernameFlag, paligoAuthFlag)
	command.MarkFlagsMutuallyExclusive(paligoTokenFlag, paligoAuthFlag)

	command.Flags().IntVar(&paligoFolderArg, paligoFolderFlag, 0, "Paligo folder ID. If not specified, the whole instance will be scanned")

	return command, nil
}
+
// getItems creates the Paligo API client, resolves the starting folder set,
// and launches the producer (folder walker) and consumer (document fetcher)
// goroutines, both tracked by the shared WaitGroup.
func (p *PaligoPlugin) getItems() {
	p.paligoApi = newPaligoApi(paligoInstanceArg, p)

	foldersToProcess, err := p.getFirstProcessingFolders()
	if err != nil {
		p.Channels.Errors <- err
		return
	}

	// Producer: walks the folder tree and emits components on itemsChan.
	itemsChan := p.processFolders(foldersToProcess)

	// Consumer: fetches each component's document and emits scan items.
	p.WaitGroup.Add(1)
	go func() {
		defer p.WaitGroup.Done()
		for item := range itemsChan {
			p.handleComponent(item)
		}
	}()
}
+
// getFirstProcessingFolders returns the initial folder work list: the single
// folder requested via --folder when set, otherwise every root folder of
// the instance as returned by the Paligo API.
func (p *PaligoPlugin) getFirstProcessingFolders() ([]PaligoItem, error) {
	foldersToProcess := []PaligoItem{}

	if paligoFolderArg != 0 {
		// A synthetic name is used since only the numeric ID is known here.
		foldersToProcess = append(foldersToProcess, PaligoItem{ID: paligoFolderArg, Name: "ID" + fmt.Sprint(paligoFolderArg)})
	} else {
		folders, err := p.paligoApi.listFolders()
		if err != nil {
			log.Error().Err(err).Msg("error while getting root folders")
			return nil, fmt.Errorf("error while getting root folders: %w", err)
		}
		for _, folder := range *folders {
			foldersToProcess = append(foldersToProcess, folder.PaligoItem)
		}
	}
	return foldersToProcess, nil
}
+
+// processFolders walks the folder tree breadth-first in a goroutine,
+// sending every component it finds on the returned channel. The channel
+// is closed when the queue is exhausted, so callers can range over it.
+// The returned channel is unbuffered: a consumer must be running or the
+// walk blocks on the first component.
+func (p *PaligoPlugin) processFolders(foldersToProcess []PaligoItem) chan PaligoItem {
+
+	itemsChan := make(chan PaligoItem)
+
+	p.WaitGroup.Add(1)
+	go func() {
+		defer p.WaitGroup.Done()
+
+		// foldersToProcess acts as a FIFO queue; children found below are
+		// appended to the tail.
+		for len(foldersToProcess) > 0 {
+			folder := foldersToProcess[0]
+			foldersToProcess = foldersToProcess[1:]
+
+			log.Info().Msgf("Getting folder %s", folder.Name)
+			folderInfo, err := p.paligoApi.showFolder(folder.ID)
+			if err != nil {
+				// Report and keep walking the remaining folders.
+				log.Error().Err(err).Msgf("error while getting %s '%s'", folder.Type, folder.Name)
+				p.Channels.Errors <- err
+				continue
+			}
+
+			for _, child := range folderInfo.Children {
+				if child.Type == "component" {
+					itemsChan <- child
+				} else if child.Type == "folder" {
+					foldersToProcess = append(foldersToProcess, child)
+				}
+				// Other child types are intentionally ignored.
+			}
+		}
+		close(itemsChan)
+	}()
+
+	return itemsChan
+}
+
+// handleComponent fetches the document behind a discovered component and
+// emits its content as a scannable item, using the Paligo editor URL as
+// the item source for reporting.
+func (p *PaligoPlugin) handleComponent(paligoItem PaligoItem) {
+
+	log.Info().Msgf("Getting component %s", paligoItem.Name)
+	document, err := p.paligoApi.showDocument(paligoItem.ID)
+	if err != nil {
+		log.Error().Err(err).Msgf("error while getting document '%s'", paligoItem.Name)
+		p.Channels.Errors <- fmt.Errorf("error while getting document '%s': %w", paligoItem.Name, err)
+		return
+	}
+
+	url := fmt.Sprintf("https://%s.paligoapp.com/document/edit/%d", p.paligoApi.Instance, document.ID)
+
+	p.Items <- item{
+		Content: &document.Content,
+		ID:      fmt.Sprintf("%s-%s-%d", p.GetName(), p.paligoApi.Instance, document.ID),
+		Source:  url,
+	}
+}
+
+/**
+ * Paligo API
+ */
+
+// https://paligo.net/docs/apidocs/en/index-en.html#UUID-a5b548af-9a37-d305-f5a8-11142d86fe20
+const (
+	// Interval between rate-limit checks (currently unused here; kept for callers).
+	PALIGO_RATE_LIMIT_CHECK_INTERVAL = 5 * time.Second
+	// Per-minute API quotas per the Paligo docs; also used as limiter burst.
+	// NOTE(review): ALL_CAPS names are non-idiomatic Go but renaming would
+	// break external callers.
+	PALIGO_DOCUMENT_SHOW_LIMIT = 50
+	PALIGO_FOLDER_SHOW_LIMIT   = 50
+)
+
+// rateLimitPerSecond spreads rateLimit events evenly across one minute.
+// NOTE(review): despite the name, the argument is treated as a per-MINUTE
+// quota (time.Minute / rateLimit); confirm intent before renaming, since
+// the Paligo limits above are per-minute values.
+func rateLimitPerSecond(rateLimit int) rate.Limit {
+	return rate.Every(time.Minute / time.Duration(rateLimit))
+}
+
+// PaligoItem is the common identity fields shared by folders, components
+// and documents in Paligo API responses.
+type PaligoItem struct {
+	ID   int    `json:"id"`
+	Name string `json:"name"`
+	UUID string `json:"uuid"`
+	Type string `json:"type"`
+}
+
+// Folder is a folder whose children have been expanded into items.
+type Folder struct {
+	PaligoItem
+	Children []PaligoItem `json:"children"`
+}
+
+// EmptyFolder mirrors Folder for list responses where the API returns
+// "children" as a string rather than an array.
+type EmptyFolder struct {
+	PaligoItem
+	Children string `json:"children"`
+}
+
+// Component is the full component representation returned by the Paligo
+// API, including authorship and checkout metadata.
+type Component struct {
+	PaligoItem
+	Subtype          string        `json:"subtype"`
+	Creator          int           `json:"creator"`
+	Owner            int           `json:"owner"`
+	Author           int           `json:"author"`
+	CreatedAt        int           `json:"created_at"`
+	ModifiedAt       int           `json:"modified_at"`
+	Checkout         bool          `json:"checkout"`
+	CheckoutUser     string        `json:"checkout_user"`
+	ParentResource   int           `json:"parent_resource"`
+	Taxonomies       []interface{} `json:"taxonomies"`
+	ReleaseStatus    string        `json:"release_status"`
+	Content          string        `json:"content"`
+	Languages        []string      `json:"languages"`
+	External         []interface{} `json:"external"`
+	CustomAttributes []interface{} `json:"custom_attributes"`
+}
+
+// ListFoldersResponse is the paginated envelope of GET /folders.
+// NOTE(review): pagination fields are currently unread by listFolders.
+type ListFoldersResponse struct {
+	Page       int           `json:"page"`
+	NextPage   string        `json:"next_page"`
+	TotalPages int           `json:"total_pages"`
+	Folders    []EmptyFolder `json:"folders"`
+}
+
+// Document is the scannable payload of GET /documents/{id}.
+type Document struct {
+	PaligoItem
+	Content   string   `json:"content"`
+	Languages []string `json:"languages"`
+}
+
+// PaligoClient is a minimal Paligo REST client with separate rate
+// limiters for the folder and document endpoints.
+type PaligoClient struct {
+	Instance string
+	auth     utils.IAuthorizationHeader
+
+	foldersLimiter   *rate.Limiter
+	documentsLimiter *rate.Limiter
+}
+
+// reserveRateLimit inspects a failed request. If the API answered 429 it
+// honors the Retry-After header (sleeping for the advertised cooldown and
+// shrinking the limiter burst to 1) and returns nil, meaning "safe to
+// retry". Any other failure returns the original error unchanged.
+func reserveRateLimit(response *http.Response, lim *rate.Limiter, err error) error {
+	// A transport-level failure can leave response nil; there is no status
+	// or header to inspect, so just propagate the original error.
+	if response == nil {
+		return err
+	}
+	if response.StatusCode != http.StatusTooManyRequests {
+		return err
+	}
+
+	rateLimit := response.Header.Get("Retry-After")
+	if rateLimit == "" {
+		return fmt.Errorf("Retry-After header not found")
+	}
+	seconds, err := strconv.Atoi(rateLimit)
+	if err != nil {
+		return fmt.Errorf("error parsing Retry-After header: %w", err)
+	}
+	log.Warn().Msgf("Rate limit exceeded, need to wait for %d seconds", seconds)
+	// Ease back in after the cooldown: no more multi-request bursts.
+	lim.SetBurst(1)
+	time.Sleep(time.Second * time.Duration(seconds))
+	return nil
+}
+
+// request performs a rate-limited GET against the Paligo API and returns
+// the raw response body. On a 429 it waits out the server's Retry-After
+// cooldown and tries again; any other failure is returned to the caller.
+func (p *PaligoClient) request(endpoint string, lim *rate.Limiter) ([]byte, error) {
+	for {
+		if err := lim.Wait(context.Background()); err != nil {
+			log.Error().Msgf("Error waiting for rate limiter: %s", err)
+			return nil, err
+		}
+
+		url := fmt.Sprintf("https://%s.paligoapp.com/api/v2/%s", p.Instance, endpoint)
+		body, response, err := utils.HttpRequest("GET", url, p.auth, utils.RetrySettings{})
+		if err == nil {
+			return body, nil
+		}
+		// Rate-limit errors are absorbed (after sleeping) and retried;
+		// everything else ends the request.
+		if rlErr := reserveRateLimit(response, lim, err); rlErr != nil {
+			return nil, rlErr
+		}
+	}
+}
+
+// listFolders returns the first page of root folders of the instance.
+// NOTE(review): pagination (next_page) is not followed; instances with
+// more than one page of root folders will be partially scanned.
+func (p *PaligoClient) listFolders() (*[]EmptyFolder, error) {
+	body, err := p.request("folders", p.foldersLimiter)
+	if err != nil {
+		return nil, err
+	}
+
+	// Decode into a value, not a *ListFoldersResponse: unmarshalling a JSON
+	// "null" body into a pointer leaves it nil and the field access below
+	// would panic.
+	var folders ListFoldersResponse
+	if err := json.Unmarshal(body, &folders); err != nil {
+		return nil, fmt.Errorf("error parsing folders response: %w", err)
+	}
+
+	return &folders.Folders, nil
+}
+
+func (p *PaligoClient) showFolder(folderId int) (*Folder, error) {
+ body, err := p.request(fmt.Sprintf("folders/%d", folderId), p.foldersLimiter)
+ if err != nil {
+ return nil, err
+ }
+
+ folder := &Folder{}
+ err = json.Unmarshal(body, folder)
+
+ return folder, err
+}
+
+func (p *PaligoClient) showDocument(documentId int) (*Document, error) {
+ body, err := p.request(fmt.Sprintf("documents/%d", documentId), p.documentsLimiter)
+ if err != nil {
+ return nil, err
+ }
+
+ document := &Document{}
+ err = json.Unmarshal(body, document)
+
+ return document, err
+}
+
+// newPaligoApi builds a PaligoClient for the given instance, with one
+// limiter per endpoint family sized to the documented per-minute quotas.
+func newPaligoApi(instance string, auth utils.IAuthorizationHeader) *PaligoClient {
+	return &PaligoClient{
+		Instance: instance,
+		auth:     auth,
+
+		foldersLimiter:   rate.NewLimiter(rateLimitPerSecond(PALIGO_FOLDER_SHOW_LIMIT), PALIGO_FOLDER_SHOW_LIMIT),
+		documentsLimiter: rate.NewLimiter(rateLimitPerSecond(PALIGO_DOCUMENT_SHOW_LIMIT), PALIGO_DOCUMENT_SHOW_LIMIT),
+	}
+}
diff --git a/plugins/paligo_test.go b/plugins/paligo_test.go
index 99f529fa..901d94c1 100644
--- a/plugins/paligo_test.go
+++ b/plugins/paligo_test.go
@@ -1,115 +1,115 @@
-package plugins
-
-import (
- "fmt"
- "github.com/stretchr/testify/assert"
- "golang.org/x/time/rate"
- "net/http"
- "testing"
- "time"
-)
-
-func TestReserveRateLimit(t *testing.T) {
- tests := []struct {
- name string
- response *http.Response
- limiter *rate.Limiter
- inputErr error
- expectedErrSub string
- expectedBurst int
- minSleep int64
- maxSleep int64
- }{
- {
- name: "Non-429 status returns input error",
- response: &http.Response{
- StatusCode: 200,
- },
- limiter: rate.NewLimiter(1, 10),
- inputErr: fmt.Errorf("non rate limit error"),
- expectedErrSub: "non rate limit error",
- expectedBurst: 10,
- minSleep: 0,
- maxSleep: 0,
- },
- {
- name: "429 status missing Retry-After header returns error",
- response: &http.Response{
- StatusCode: 429,
- Header: http.Header{},
- },
- limiter: rate.NewLimiter(1, 10),
- inputErr: nil,
- expectedErrSub: "Retry-After header not found",
- expectedBurst: 10,
- minSleep: 0,
- maxSleep: 0,
- },
- {
- name: "429 status with invalid Retry-After header returns error",
- response: &http.Response{
- StatusCode: 429,
- Header: http.Header{
- "Retry-After": []string{"abc"},
- },
- },
- limiter: rate.NewLimiter(1, 10),
- inputErr: nil,
- expectedErrSub: "error parsing Retry-After header",
- expectedBurst: 10,
- minSleep: 0,
- maxSleep: 0,
- },
- {
- name: "429 status with valid Retry-After header (0) returns nil and sets burst to 1 with minimal sleep",
- response: &http.Response{
- StatusCode: 429,
- Header: http.Header{
- "Retry-After": []string{"0"},
- },
- },
- limiter: rate.NewLimiter(1, 10),
- inputErr: nil,
- expectedErrSub: "",
- expectedBurst: 1,
- minSleep: 0,
- maxSleep: 50,
- },
- {
- name: "429 status with valid Retry-After header (1) returns nil and sets burst to 1 with ~1 sec sleep",
- response: &http.Response{
- StatusCode: 429,
- Header: http.Header{
- "Retry-After": []string{"1"},
- },
- },
- limiter: rate.NewLimiter(1, 10),
- inputErr: nil,
- expectedErrSub: "",
- expectedBurst: 1,
- minSleep: 1000,
- maxSleep: 1050,
- },
- }
-
- for _, tc := range tests {
- t.Run(tc.name, func(t *testing.T) {
- start := time.Now()
- err := reserveRateLimit(tc.response, tc.limiter, tc.inputErr)
- duration := time.Since(start).Milliseconds()
-
- if tc.expectedErrSub != "" {
- assert.Error(t, err, "expected an error")
- assert.Contains(t, err.Error(), tc.expectedErrSub, "error message mismatch")
- } else {
- assert.NoError(t, err, "expected no error")
- if tc.maxSleep > 0 {
- assert.GreaterOrEqual(t, duration, tc.minSleep, "expected sleep of at least %d ms", tc.minSleep)
- assert.Less(t, duration, tc.maxSleep, "expected sleep of less than %d ms", tc.maxSleep)
- }
- }
-
- assert.Equal(t, tc.expectedBurst, tc.limiter.Burst(), "limiter burst mismatch")
- })
- }
-}
+package plugins
+
+import (
+ "fmt"
+ "github.com/stretchr/testify/assert"
+ "golang.org/x/time/rate"
+ "net/http"
+ "testing"
+ "time"
+)
+
+// TestReserveRateLimit covers reserveRateLimit's three behaviors: pass-through
+// of non-429 errors, error reporting for missing/invalid Retry-After headers,
+// and the sleep-and-shrink-burst path for valid 429 responses.
+// NOTE(review): the sleep-duration assertions (maxSleep windows of 50 ms) are
+// wall-clock based and may be flaky on loaded CI machines.
+func TestReserveRateLimit(t *testing.T) {
+	tests := []struct {
+		name           string
+		response       *http.Response
+		limiter        *rate.Limiter
+		inputErr       error
+		expectedErrSub string
+		expectedBurst  int
+		minSleep       int64 // milliseconds
+		maxSleep       int64 // milliseconds; 0 disables the timing check
+	}{
+		{
+			name: "Non-429 status returns input error",
+			response: &http.Response{
+				StatusCode: 200,
+			},
+			limiter:        rate.NewLimiter(1, 10),
+			inputErr:       fmt.Errorf("non rate limit error"),
+			expectedErrSub: "non rate limit error",
+			expectedBurst:  10,
+			minSleep:       0,
+			maxSleep:       0,
+		},
+		{
+			name: "429 status missing Retry-After header returns error",
+			response: &http.Response{
+				StatusCode: 429,
+				Header:     http.Header{},
+			},
+			limiter:        rate.NewLimiter(1, 10),
+			inputErr:       nil,
+			expectedErrSub: "Retry-After header not found",
+			expectedBurst:  10,
+			minSleep:       0,
+			maxSleep:       0,
+		},
+		{
+			name: "429 status with invalid Retry-After header returns error",
+			response: &http.Response{
+				StatusCode: 429,
+				Header: http.Header{
+					"Retry-After": []string{"abc"},
+				},
+			},
+			limiter:        rate.NewLimiter(1, 10),
+			inputErr:       nil,
+			expectedErrSub: "error parsing Retry-After header",
+			expectedBurst:  10,
+			minSleep:       0,
+			maxSleep:       0,
+		},
+		{
+			name: "429 status with valid Retry-After header (0) returns nil and sets burst to 1 with minimal sleep",
+			response: &http.Response{
+				StatusCode: 429,
+				Header: http.Header{
+					"Retry-After": []string{"0"},
+				},
+			},
+			limiter:        rate.NewLimiter(1, 10),
+			inputErr:       nil,
+			expectedErrSub: "",
+			expectedBurst:  1,
+			minSleep:       0,
+			maxSleep:       50,
+		},
+		{
+			name: "429 status with valid Retry-After header (1) returns nil and sets burst to 1 with ~1 sec sleep",
+			response: &http.Response{
+				StatusCode: 429,
+				Header: http.Header{
+					"Retry-After": []string{"1"},
+				},
+			},
+			limiter:        rate.NewLimiter(1, 10),
+			inputErr:       nil,
+			expectedErrSub: "",
+			expectedBurst:  1,
+			minSleep:       1000,
+			maxSleep:       1050,
+		},
+	}
+
+	for _, tc := range tests {
+		t.Run(tc.name, func(t *testing.T) {
+			start := time.Now()
+			err := reserveRateLimit(tc.response, tc.limiter, tc.inputErr)
+			duration := time.Since(start).Milliseconds()
+
+			if tc.expectedErrSub != "" {
+				assert.Error(t, err, "expected an error")
+				assert.Contains(t, err.Error(), tc.expectedErrSub, "error message mismatch")
+			} else {
+				assert.NoError(t, err, "expected no error")
+				if tc.maxSleep > 0 {
+					assert.GreaterOrEqual(t, duration, tc.minSleep, "expected sleep of at least %d ms", tc.minSleep)
+					assert.Less(t, duration, tc.maxSleep, "expected sleep of less than %d ms", tc.maxSleep)
+				}
+			}
+
+			// Burst must only shrink to 1 on the successful 429 path.
+			assert.Equal(t, tc.expectedBurst, tc.limiter.Burst(), "limiter burst mismatch")
+		})
+	}
+}
diff --git a/plugins/slack.go b/plugins/slack.go
index 12dd04ad..22004a98 100644
--- a/plugins/slack.go
+++ b/plugins/slack.go
@@ -1,223 +1,223 @@
-package plugins
-
-import (
- "fmt"
- "strconv"
- "sync"
- "time"
-
- "github.com/rs/zerolog/log"
- "github.com/slack-go/slack"
- "github.com/spf13/cobra"
-)
-
-const (
- slackTokenFlag = "token"
- slackTeamFlag = "team"
- slackChannelFlag = "channel"
- slackBackwardDurationFlag = "duration"
- slackMessagesCountFlag = "messages-count"
-)
-
-const slackDefaultDateFrom = time.Hour * 24 * 14
-
-type SlackPlugin struct {
- Plugin
- Channels
- Token string
-}
-
-func (p *SlackPlugin) GetName() string {
- return "slack"
-}
-
-var (
- tokenArg string
- teamArg string
- channelsArg []string
- backwardDurationArg time.Duration
- messagesCountArg int
-)
-
-func (p *SlackPlugin) DefineCommand(items chan ISourceItem, errors chan error) (*cobra.Command, error) {
- p.Channels = Channels{
- Items: items,
- Errors: errors,
- WaitGroup: &sync.WaitGroup{},
- }
-
- command := &cobra.Command{
- Use: fmt.Sprintf("%s --%s TOKEN --%s TEAM", p.GetName(), slackTokenFlag, slackTeamFlag),
- Short: "Scan Slack team",
- Long: "Scan Slack team for sensitive information.",
- Run: func(cmd *cobra.Command, args []string) {
- p.getItems()
- p.Channels.WaitGroup.Wait()
- close(items)
- },
- }
-
- command.Flags().StringVar(&tokenArg, slackTokenFlag, "", "Slack token [required]")
- err := command.MarkFlagRequired(slackTokenFlag)
- if err != nil {
- return nil, fmt.Errorf("error while marking flag %s as required: %w", slackTokenFlag, err)
- }
- command.Flags().StringVar(&teamArg, slackTeamFlag, "", "Slack team name or ID [required]")
- err = command.MarkFlagRequired(slackTeamFlag)
- if err != nil {
- return nil, fmt.Errorf("error while marking flag %s as required: %w", slackTeamFlag, err)
- }
- command.Flags().StringSliceVar(&channelsArg, slackChannelFlag, []string{}, "Slack channels to scan")
- command.Flags().DurationVar(&backwardDurationArg, slackBackwardDurationFlag, slackDefaultDateFrom, "Slack backward duration for messages (ex: 24h, 7d, 1M, 1y)")
- command.Flags().IntVar(&messagesCountArg, slackMessagesCountFlag, 0, "Slack messages count to scan (0 = all messages)")
-
- return command, nil
-}
-
-func (p *SlackPlugin) getItems() {
- slackApi := slack.New(tokenArg)
-
- team, err := getTeam(slackApi, teamArg)
- if err != nil {
- p.Errors <- fmt.Errorf("error while getting team: %w", err)
- return
- }
-
- channels, err := getChannels(slackApi, team.ID, channelsArg)
- if err != nil {
- p.Errors <- fmt.Errorf("error while getting channels for team %s: %w", team.Name, err)
- return
- }
- if len(*channels) == 0 {
- log.Warn().Msgf("No channels found for team %s", team.Name)
- return
- }
-
- log.Info().Msgf("Found %d channels for team %s", len(*channels), team.Name)
- p.WaitGroup.Add(len(*channels))
- for _, channel := range *channels {
- go p.getItemsFromChannel(slackApi, channel)
- }
-}
-
-func (p *SlackPlugin) getItemsFromChannel(slackApi *slack.Client, channel slack.Channel) {
- defer p.WaitGroup.Done()
- log.Info().Msgf("Getting items from channel %s", channel.Name)
-
- cursor := ""
- counter := 0
- for {
- history, err := slackApi.GetConversationHistory(&slack.GetConversationHistoryParameters{
- Cursor: cursor,
- ChannelID: channel.ID,
- })
- if err != nil {
- p.Errors <- fmt.Errorf("error while getting history for channel %s: %w", channel.Name, err)
- return
- }
- for _, message := range history.Messages {
- outOfRange, err := isMessageOutOfRange(message, backwardDurationArg, counter, messagesCountArg)
- if err != nil {
- p.Errors <- fmt.Errorf("error while checking message: %w", err)
- return
- }
- if outOfRange {
- break
- }
- if message.Text != "" {
- url, err := slackApi.GetPermalink(&slack.PermalinkParameters{Channel: channel.ID, Ts: message.Timestamp})
- if err != nil {
- log.Warn().Msgf("Error while getting permalink for message %s: %s", message.Timestamp, err)
- url = fmt.Sprintf("Channel: %s; Message: %s", channel.Name, message.Timestamp)
- }
- p.Items <- item{
- Content: &message.Text,
- ID: fmt.Sprintf("%s-%s-%s", p.GetName(), channel.ID, message.Timestamp),
- Source: url,
- }
- }
- counter++
- }
- if history.ResponseMetaData.NextCursor == "" {
- break
- }
- cursor = history.ResponseMetaData.NextCursor
- }
-}
-
-// Declare it to be consistent with all comparaisons
-var timeNow = time.Now()
-
-func isMessageOutOfRange(message slack.Message, backwardDuration time.Duration, currentMessagesCount int, limitMessagesCount int) (bool, error) {
- if backwardDuration != 0 {
- timestamp, err := strconv.ParseFloat(message.Timestamp, 64)
- if err != nil {
- return true, fmt.Errorf("error while parsing timestamp: %w", err)
- }
- messageDate := time.Unix(int64(timestamp), 0)
- if messageDate.Before(timeNow.Add(-backwardDuration)) {
- return true, nil
- }
- }
- if limitMessagesCount != 0 && currentMessagesCount >= limitMessagesCount {
- return true, nil
- }
- return false, nil
-}
-
-type ISlackClient interface {
- GetConversations(*slack.GetConversationsParameters) ([]slack.Channel, string, error)
- ListTeams(slack.ListTeamsParameters) ([]slack.Team, string, error)
-}
-
-func getTeam(slackApi ISlackClient, teamName string) (*slack.Team, error) {
- cursorHolder := ""
- for {
- teams, cursor, err := slackApi.ListTeams(slack.ListTeamsParameters{Cursor: cursorHolder})
- if err != nil {
- return nil, fmt.Errorf("error while getting teams: %w", err)
- }
- for _, team := range teams {
- if team.Name == teamName || team.ID == teamName {
- return &team, nil
- }
- }
- if cursor == "" {
- break
- }
- cursorHolder = cursor
- }
- return nil, fmt.Errorf("team '%s' not found", teamName)
-}
-
-func getChannels(slackApi ISlackClient, teamId string, wantedChannels []string) (*[]slack.Channel, error) {
- cursorHolder := ""
- selectedChannels := []slack.Channel{}
- for {
- channels, cursor, err := slackApi.GetConversations(&slack.GetConversationsParameters{
- Cursor: cursorHolder,
- TeamID: teamId,
- })
- if err != nil {
- return nil, fmt.Errorf("error while getting channels: %w", err)
- }
- if len(wantedChannels) == 0 {
- selectedChannels = append(selectedChannels, channels...)
- } else {
- for _, channel := range wantedChannels {
- for _, c := range channels {
- if c.Name == channel || c.ID == channel {
- selectedChannels = append(selectedChannels, c)
- }
- }
- }
- if len(selectedChannels) == len(wantedChannels) {
- return &selectedChannels, nil
- }
- }
- if cursor == "" {
- return &selectedChannels, nil
- }
- cursorHolder = cursor
- }
-}
+package plugins
+
+import (
+ "fmt"
+ "strconv"
+ "sync"
+ "time"
+
+ "github.com/rs/zerolog/log"
+ "github.com/slack-go/slack"
+ "github.com/spf13/cobra"
+)
+
+// CLI flag names for the slack sub-command.
+const (
+	slackTokenFlag            = "token"
+	slackTeamFlag             = "team"
+	slackChannelFlag          = "channel"
+	slackBackwardDurationFlag = "duration"
+	slackMessagesCountFlag    = "messages-count"
+)
+
+// slackDefaultDateFrom is the default scan window: the last 14 days.
+const slackDefaultDateFrom = time.Hour * 24 * 14
+
+// SlackPlugin scans a Slack team's channel history for secrets.
+type SlackPlugin struct {
+	Plugin
+	Channels
+	Token string
+}
+
+// GetName returns the plugin identifier used in command names and item IDs.
+func (p *SlackPlugin) GetName() string {
+	return "slack"
+}
+
+// Command-line argument storage, bound by DefineCommand.
+// NOTE(review): package-level mutable state means at most one slack scan
+// can be configured per process.
+var (
+	tokenArg            string
+	teamArg             string
+	channelsArg         []string
+	backwardDurationArg time.Duration
+	messagesCountArg    int
+)
+
+// DefineCommand wires the "slack" cobra sub-command: output/error channels,
+// required token and team flags, and the optional channel/duration/count
+// filters. The Run closure blocks until all channel workers finish, then
+// closes the items channel to signal end of input.
+func (p *SlackPlugin) DefineCommand(items chan ISourceItem, errors chan error) (*cobra.Command, error) {
+	p.Channels = Channels{
+		Items:     items,
+		Errors:    errors,
+		WaitGroup: &sync.WaitGroup{},
+	}
+
+	command := &cobra.Command{
+		Use:   fmt.Sprintf("%s --%s TOKEN --%s TEAM", p.GetName(), slackTokenFlag, slackTeamFlag),
+		Short: "Scan Slack team",
+		Long:  "Scan Slack team for sensitive information.",
+		Run: func(cmd *cobra.Command, args []string) {
+			p.getItems()
+			p.Channels.WaitGroup.Wait()
+			close(items)
+		},
+	}
+
+	command.Flags().StringVar(&tokenArg, slackTokenFlag, "", "Slack token [required]")
+	err := command.MarkFlagRequired(slackTokenFlag)
+	if err != nil {
+		return nil, fmt.Errorf("error while marking flag %s as required: %w", slackTokenFlag, err)
+	}
+	command.Flags().StringVar(&teamArg, slackTeamFlag, "", "Slack team name or ID [required]")
+	err = command.MarkFlagRequired(slackTeamFlag)
+	if err != nil {
+		return nil, fmt.Errorf("error while marking flag %s as required: %w", slackTeamFlag, err)
+	}
+	command.Flags().StringSliceVar(&channelsArg, slackChannelFlag, []string{}, "Slack channels to scan")
+	command.Flags().DurationVar(&backwardDurationArg, slackBackwardDurationFlag, slackDefaultDateFrom, "Slack backward duration for messages (ex: 24h, 7d, 1M, 1y)")
+	command.Flags().IntVar(&messagesCountArg, slackMessagesCountFlag, 0, "Slack messages count to scan (0 = all messages)")
+
+	return command, nil
+}
+
+// getItems resolves the target team and channel set, then fans out one
+// goroutine per channel to pull message history concurrently.
+func (p *SlackPlugin) getItems() {
+	slackApi := slack.New(tokenArg)
+
+	team, err := getTeam(slackApi, teamArg)
+	if err != nil {
+		p.Errors <- fmt.Errorf("error while getting team: %w", err)
+		return
+	}
+
+	channels, err := getChannels(slackApi, team.ID, channelsArg)
+	if err != nil {
+		p.Errors <- fmt.Errorf("error while getting channels for team %s: %w", team.Name, err)
+		return
+	}
+	if len(*channels) == 0 {
+		log.Warn().Msgf("No channels found for team %s", team.Name)
+		return
+	}
+
+	log.Info().Msgf("Found %d channels for team %s", len(*channels), team.Name)
+	// Add for all workers up front so Wait() in DefineCommand cannot race a
+	// late Add.
+	p.WaitGroup.Add(len(*channels))
+	for _, channel := range *channels {
+		go p.getItemsFromChannel(slackApi, channel)
+	}
+}
+
+// getItemsFromChannel pages through one channel's history, emitting every
+// non-empty message as an item until the backward-duration window or the
+// message-count limit is exhausted.
+func (p *SlackPlugin) getItemsFromChannel(slackApi *slack.Client, channel slack.Channel) {
+	defer p.WaitGroup.Done()
+	log.Info().Msgf("Getting items from channel %s", channel.Name)
+
+	cursor := ""
+	counter := 0
+	for {
+		history, err := slackApi.GetConversationHistory(&slack.GetConversationHistoryParameters{
+			Cursor:    cursor,
+			ChannelID: channel.ID,
+		})
+		if err != nil {
+			p.Errors <- fmt.Errorf("error while getting history for channel %s: %w", channel.Name, err)
+			return
+		}
+		for _, message := range history.Messages {
+			outOfRange, err := isMessageOutOfRange(message, backwardDurationArg, counter, messagesCountArg)
+			if err != nil {
+				p.Errors <- fmt.Errorf("error while checking message: %w", err)
+				return
+			}
+			if outOfRange {
+				// History is returned newest-first, so once one message is
+				// out of range every remaining (older) message is too — and
+				// the count limit never un-trips. Stop paginating entirely
+				// instead of fetching further pages we would discard.
+				return
+			}
+			if message.Text != "" {
+				url, err := slackApi.GetPermalink(&slack.PermalinkParameters{Channel: channel.ID, Ts: message.Timestamp})
+				if err != nil {
+					// Best effort: fall back to a descriptive source string.
+					log.Warn().Msgf("Error while getting permalink for message %s: %s", message.Timestamp, err)
+					url = fmt.Sprintf("Channel: %s; Message: %s", channel.Name, message.Timestamp)
+				}
+				p.Items <- item{
+					Content: &message.Text,
+					ID:      fmt.Sprintf("%s-%s-%s", p.GetName(), channel.ID, message.Timestamp),
+					Source:  url,
+				}
+			}
+			counter++
+		}
+		if history.ResponseMetaData.NextCursor == "" {
+			break
+		}
+		cursor = history.ResponseMetaData.NextCursor
+	}
+}
+
+// Declare timeNow once at package init so all message-age comparisons use the same reference instant.
+var timeNow = time.Now()
+
+// isMessageOutOfRange reports whether a message falls outside the scan
+// window: either older than backwardDuration (0 disables the age check)
+// or past the message-count limit (0 means unlimited). A malformed
+// timestamp is treated as out of range and also returns an error.
+func isMessageOutOfRange(message slack.Message, backwardDuration time.Duration, currentMessagesCount int, limitMessagesCount int) (bool, error) {
+	// Age check first; its parse error must take precedence over the count
+	// check to keep the original reporting behavior.
+	if backwardDuration != 0 {
+		ts, err := strconv.ParseFloat(message.Timestamp, 64)
+		if err != nil {
+			return true, fmt.Errorf("error while parsing timestamp: %w", err)
+		}
+		cutoff := timeNow.Add(-backwardDuration)
+		if time.Unix(int64(ts), 0).Before(cutoff) {
+			return true, nil
+		}
+	}
+	return limitMessagesCount != 0 && currentMessagesCount >= limitMessagesCount, nil
+}
+
+// ISlackClient is the subset of the slack.Client API used by getTeam and
+// getChannels, defined here so tests can substitute a mock.
+type ISlackClient interface {
+	GetConversations(*slack.GetConversationsParameters) ([]slack.Channel, string, error)
+	ListTeams(slack.ListTeamsParameters) ([]slack.Team, string, error)
+}
+
+// getTeam pages through ListTeams and returns the first team whose name
+// or ID matches teamName, or an error when no page contains a match.
+func getTeam(slackApi ISlackClient, teamName string) (*slack.Team, error) {
+	cursor := ""
+	for {
+		teams, next, err := slackApi.ListTeams(slack.ListTeamsParameters{Cursor: cursor})
+		if err != nil {
+			return nil, fmt.Errorf("error while getting teams: %w", err)
+		}
+		for i := range teams {
+			if teams[i].Name == teamName || teams[i].ID == teamName {
+				return &teams[i], nil
+			}
+		}
+		if next == "" {
+			// Exhausted all pages without a match.
+			return nil, fmt.Errorf("team '%s' not found", teamName)
+		}
+		cursor = next
+	}
+}
+
+// getChannels pages through the team's conversations. With an empty
+// wantedChannels it collects every channel; otherwise it keeps only
+// channels whose name or ID matches a wanted entry, returning early once
+// one match per wanted entry has been found.
+// NOTE(review): a channel matching two wanted entries (e.g. by name and by
+// ID) is appended twice, and the early-return count comparison assumes
+// exactly one match per wanted entry — confirm whether duplicates matter.
+func getChannels(slackApi ISlackClient, teamId string, wantedChannels []string) (*[]slack.Channel, error) {
+	cursorHolder := ""
+	selectedChannels := []slack.Channel{}
+	for {
+		channels, cursor, err := slackApi.GetConversations(&slack.GetConversationsParameters{
+			Cursor: cursorHolder,
+			TeamID: teamId,
+		})
+		if err != nil {
+			return nil, fmt.Errorf("error while getting channels: %w", err)
+		}
+		if len(wantedChannels) == 0 {
+			selectedChannels = append(selectedChannels, channels...)
+		} else {
+			for _, channel := range wantedChannels {
+				for _, c := range channels {
+					if c.Name == channel || c.ID == channel {
+						selectedChannels = append(selectedChannels, c)
+					}
+				}
+			}
+			// All wanted channels found: no need to fetch further pages.
+			if len(selectedChannels) == len(wantedChannels) {
+				return &selectedChannels, nil
+			}
+		}
+		if cursor == "" {
+			return &selectedChannels, nil
+		}
+		cursorHolder = cursor
+	}
+}
diff --git a/plugins/slack_test.go b/plugins/slack_test.go
index 6b45c74f..04a15c21 100644
--- a/plugins/slack_test.go
+++ b/plugins/slack_test.go
@@ -1,316 +1,316 @@
-package plugins
-
-import (
- "errors"
- "fmt"
- "github.com/stretchr/testify/assert"
- "strconv"
- "testing"
- "time"
-
- "github.com/slack-go/slack"
-)
-
-type ListTeamsResponse struct {
- Teams []slack.Team
- Cursor string
- Err error
-}
-
-type mockSlackClient struct {
- channels []slack.Channel
- err error
- listTeamsResponses []ListTeamsResponse
-}
-
-func (m *mockSlackClient) GetConversations(params *slack.GetConversationsParameters) ([]slack.Channel, string, error) {
- return m.channels, "", m.err
-}
-func (m *mockSlackClient) ListTeams(params slack.ListTeamsParameters) ([]slack.Team, string, error) {
- if len(m.listTeamsResponses) == 0 {
- return nil, "", nil
- }
- response := m.listTeamsResponses[0]
- m.listTeamsResponses = m.listTeamsResponses[1:]
- return response.Teams, response.Cursor, response.Err
-}
-
-func TestGetChannels(t *testing.T) {
-
- tests := []struct {
- name string
- slackApi mockSlackClient
- teamId string
- wantedChannels []string
- expectedResult []slack.Channel
- expectedError error
- }{
- {
- name: "get all channels",
- slackApi: mockSlackClient{
- channels: []slack.Channel{
- {GroupConversation: slack.GroupConversation{Name: "channel1", Conversation: slack.Conversation{ID: "C123456"}}},
- {GroupConversation: slack.GroupConversation{Name: "channel2", Conversation: slack.Conversation{ID: "C234567"}}},
- },
- },
- teamId: "T123456",
- wantedChannels: []string{},
- expectedResult: []slack.Channel{
- {GroupConversation: slack.GroupConversation{Name: "channel1", Conversation: slack.Conversation{ID: "C123456"}}},
- {GroupConversation: slack.GroupConversation{Name: "channel2", Conversation: slack.Conversation{ID: "C234567"}}},
- },
- expectedError: nil,
- },
- {
- name: "get specific channels",
- slackApi: mockSlackClient{
- channels: []slack.Channel{
- {GroupConversation: slack.GroupConversation{Name: "channel1", Conversation: slack.Conversation{ID: "C123456"}}},
- {GroupConversation: slack.GroupConversation{Name: "channel2", Conversation: slack.Conversation{ID: "C234567"}}},
- },
- },
- teamId: "T123456",
- wantedChannels: []string{"channel1", "C234567"},
- expectedResult: []slack.Channel{
- {GroupConversation: slack.GroupConversation{Name: "channel1", Conversation: slack.Conversation{ID: "C123456"}}},
- {GroupConversation: slack.GroupConversation{Name: "channel2", Conversation: slack.Conversation{ID: "C234567"}}},
- },
- expectedError: nil,
- },
- {
- name: "get specific channels not found",
- slackApi: mockSlackClient{
- channels: []slack.Channel{
- {GroupConversation: slack.GroupConversation{Name: "channel1", Conversation: slack.Conversation{ID: "C123456"}}},
- {GroupConversation: slack.GroupConversation{Name: "channel2", Conversation: slack.Conversation{ID: "C234567"}}},
- },
- },
- teamId: "T123456",
- wantedChannels: []string{"channel3", "C345678"},
- expectedResult: []slack.Channel{},
- expectedError: nil,
- },
- {
- name: "get channels error",
- slackApi: mockSlackClient{
- err: fmt.Errorf("some error"),
- channels: []slack.Channel{},
- },
- teamId: "T123456",
- wantedChannels: []string{},
- expectedResult: []slack.Channel{},
- expectedError: fmt.Errorf("error while getting channels: %w", errors.New("some error")),
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- result, err := getChannels(&tt.slackApi, tt.teamId, tt.wantedChannels)
- if err != nil && tt.expectedError == nil {
- t.Errorf("unexpected error: %v", err)
- }
- if err == nil && tt.expectedError != nil {
- t.Errorf("expected error: %v, but got nil", tt.expectedError)
- }
- if err != nil && tt.expectedError != nil {
- return
- }
- if len(*result) != len(tt.expectedResult) {
- t.Errorf("expected %d channels, but got %d", len(tt.expectedResult), len(*result))
- }
- for i, c := range *result {
- if c.Name != tt.expectedResult[i].Name || c.ID != tt.expectedResult[i].ID {
- t.Errorf("expected channel %v, but got %v", tt.expectedResult[i], c)
- }
- }
- })
- }
-}
-
-func formatSecondsAnd6DigitsMiliseconds(t time.Time) string {
- n := float64(t.UnixMicro()) / float64(time.Millisecond)
- return strconv.FormatFloat(n, 'f', 6, 64)
-}
-
-const (
- noLimit = 0
-)
-
-func TestIsMessageOutOfRange(t *testing.T) {
- tests := []struct {
- name string
- message slack.Message
- backwardDuration time.Duration
- currentMessagesCount int
- limitMessagesCount int
- expectedOutOfRange bool
- }{
- {
- name: "message is within range",
- message: slack.Message{
- Msg: slack.Msg{
- Timestamp: formatSecondsAnd6DigitsMiliseconds(timeNow),
- },
- },
- backwardDuration: time.Minute,
- currentMessagesCount: 0,
- limitMessagesCount: noLimit,
- expectedOutOfRange: false,
- },
- {
- name: "message is out of range due to backward duration",
- message: slack.Message{
- Msg: slack.Msg{
- Timestamp: formatSecondsAnd6DigitsMiliseconds(timeNow.Add(-time.Minute * 2)),
- },
- },
- backwardDuration: time.Minute,
- currentMessagesCount: 0,
- limitMessagesCount: noLimit,
- expectedOutOfRange: true,
- },
- {
- name: "message is out of range due to message count limit",
- message: slack.Message{
- Msg: slack.Msg{
- Timestamp: formatSecondsAnd6DigitsMiliseconds(timeNow),
- },
- },
- backwardDuration: noLimit,
- currentMessagesCount: 1,
- limitMessagesCount: 1,
- expectedOutOfRange: true,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- outOfRange, err := isMessageOutOfRange(tt.message, tt.backwardDuration, tt.currentMessagesCount, tt.limitMessagesCount)
- if err != nil {
- t.Errorf("unexpected error: %v", err)
- }
- if outOfRange != tt.expectedOutOfRange {
- t.Errorf("expected outOfRange to be %v, but got %v", tt.expectedOutOfRange, outOfRange)
- }
- })
- }
-}
-
-func TestGetTeam(t *testing.T) {
- tests := []struct {
- name string
- teamNameToSearch string
- mockResponses []ListTeamsResponse
- expectedTeam *slack.Team
- expectedErrSubstr string
- }{
- {
- name: "ListTeams returns error",
- teamNameToSearch: "AnyTeam",
- mockResponses: []ListTeamsResponse{
- {
- Teams: nil,
- Cursor: "",
- Err: errors.New("some error"),
- },
- },
- expectedTeam: nil,
- expectedErrSubstr: "error while getting teams",
- },
- {
- name: "Team found by Name on first page",
- teamNameToSearch: "TeamA",
- mockResponses: []ListTeamsResponse{
- {
- Teams: []slack.Team{
- {ID: "2", Name: "OtherTeam"},
- {ID: "1", Name: "TeamA"},
- },
- Cursor: "",
- Err: nil,
- },
- },
- expectedTeam: &slack.Team{ID: "1", Name: "TeamA"},
- expectedErrSubstr: "",
- },
- {
- name: "Team found by ID on first page",
- teamNameToSearch: "TeamB",
- mockResponses: []ListTeamsResponse{
- {
- Teams: []slack.Team{
- {ID: "1", Name: "OtherTeam"},
- {ID: "TeamB", Name: "SomeTeam"},
- },
- Cursor: "",
- Err: nil,
- },
- },
- expectedTeam: &slack.Team{ID: "TeamB", Name: "SomeTeam"},
- expectedErrSubstr: "",
- },
- {
- name: "Team found in second page",
- teamNameToSearch: "TeamC",
- mockResponses: []ListTeamsResponse{
- {
- Teams: []slack.Team{
- {ID: "1", Name: "OtherTeam"},
- },
- Cursor: "cursor1",
- Err: nil,
- },
- {
- Teams: []slack.Team{
- {ID: "3", Name: "TeamC"},
- },
- Cursor: "",
- Err: nil,
- },
- },
- expectedTeam: &slack.Team{ID: "3", Name: "TeamC"},
- expectedErrSubstr: "",
- },
- {
- name: "Team not found",
- teamNameToSearch: "TeamNotFound",
- mockResponses: []ListTeamsResponse{
- {
- Teams: []slack.Team{
- {ID: "1", Name: "OtherTeam1"},
- },
- Cursor: "cursor1",
- Err: nil,
- },
- {
- Teams: []slack.Team{
- {ID: "2", Name: "OtherTeam2"},
- },
- Cursor: "",
- Err: nil,
- },
- },
- expectedTeam: nil,
- expectedErrSubstr: "team 'TeamNotFound' not found",
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- client := &mockSlackClient{
- listTeamsResponses: tt.mockResponses,
- }
-
- team, err := getTeam(client, tt.teamNameToSearch)
- if tt.expectedTeam != nil {
- assert.NoError(t, err)
- assert.NotNil(t, team)
- assert.Equal(t, *tt.expectedTeam, *team)
- } else {
- assert.Nil(t, team)
- assert.Error(t, err)
- assert.Contains(t, err.Error(), tt.expectedErrSubstr)
- }
- })
- }
-}
+package plugins
+
+import (
+ "errors"
+ "fmt"
+ "github.com/stretchr/testify/assert"
+ "strconv"
+ "testing"
+ "time"
+
+ "github.com/slack-go/slack"
+)
+
+type ListTeamsResponse struct {
+ Teams []slack.Team
+ Cursor string
+ Err error
+}
+
+type mockSlackClient struct {
+ channels []slack.Channel
+ err error
+ listTeamsResponses []ListTeamsResponse
+}
+
+func (m *mockSlackClient) GetConversations(params *slack.GetConversationsParameters) ([]slack.Channel, string, error) {
+ return m.channels, "", m.err
+}
+func (m *mockSlackClient) ListTeams(params slack.ListTeamsParameters) ([]slack.Team, string, error) {
+ if len(m.listTeamsResponses) == 0 {
+ return nil, "", nil
+ }
+ response := m.listTeamsResponses[0]
+ m.listTeamsResponses = m.listTeamsResponses[1:]
+ return response.Teams, response.Cursor, response.Err
+}
+
+func TestGetChannels(t *testing.T) {
+
+ tests := []struct {
+ name string
+ slackApi mockSlackClient
+ teamId string
+ wantedChannels []string
+ expectedResult []slack.Channel
+ expectedError error
+ }{
+ {
+ name: "get all channels",
+ slackApi: mockSlackClient{
+ channels: []slack.Channel{
+ {GroupConversation: slack.GroupConversation{Name: "channel1", Conversation: slack.Conversation{ID: "C123456"}}},
+ {GroupConversation: slack.GroupConversation{Name: "channel2", Conversation: slack.Conversation{ID: "C234567"}}},
+ },
+ },
+ teamId: "T123456",
+ wantedChannels: []string{},
+ expectedResult: []slack.Channel{
+ {GroupConversation: slack.GroupConversation{Name: "channel1", Conversation: slack.Conversation{ID: "C123456"}}},
+ {GroupConversation: slack.GroupConversation{Name: "channel2", Conversation: slack.Conversation{ID: "C234567"}}},
+ },
+ expectedError: nil,
+ },
+ {
+ name: "get specific channels",
+ slackApi: mockSlackClient{
+ channels: []slack.Channel{
+ {GroupConversation: slack.GroupConversation{Name: "channel1", Conversation: slack.Conversation{ID: "C123456"}}},
+ {GroupConversation: slack.GroupConversation{Name: "channel2", Conversation: slack.Conversation{ID: "C234567"}}},
+ },
+ },
+ teamId: "T123456",
+ wantedChannels: []string{"channel1", "C234567"},
+ expectedResult: []slack.Channel{
+ {GroupConversation: slack.GroupConversation{Name: "channel1", Conversation: slack.Conversation{ID: "C123456"}}},
+ {GroupConversation: slack.GroupConversation{Name: "channel2", Conversation: slack.Conversation{ID: "C234567"}}},
+ },
+ expectedError: nil,
+ },
+ {
+ name: "get specific channels not found",
+ slackApi: mockSlackClient{
+ channels: []slack.Channel{
+ {GroupConversation: slack.GroupConversation{Name: "channel1", Conversation: slack.Conversation{ID: "C123456"}}},
+ {GroupConversation: slack.GroupConversation{Name: "channel2", Conversation: slack.Conversation{ID: "C234567"}}},
+ },
+ },
+ teamId: "T123456",
+ wantedChannels: []string{"channel3", "C345678"},
+ expectedResult: []slack.Channel{},
+ expectedError: nil,
+ },
+ {
+ name: "get channels error",
+ slackApi: mockSlackClient{
+ err: fmt.Errorf("some error"),
+ channels: []slack.Channel{},
+ },
+ teamId: "T123456",
+ wantedChannels: []string{},
+ expectedResult: []slack.Channel{},
+ expectedError: fmt.Errorf("error while getting channels: %w", errors.New("some error")),
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result, err := getChannels(&tt.slackApi, tt.teamId, tt.wantedChannels)
+ if err != nil && tt.expectedError == nil {
+ t.Errorf("unexpected error: %v", err)
+ }
+ if err == nil && tt.expectedError != nil {
+ t.Errorf("expected error: %v, but got nil", tt.expectedError)
+ }
+ if err != nil && tt.expectedError != nil {
+ return
+ }
+ if len(*result) != len(tt.expectedResult) {
+ t.Errorf("expected %d channels, but got %d", len(tt.expectedResult), len(*result))
+ }
+ for i, c := range *result {
+ if c.Name != tt.expectedResult[i].Name || c.ID != tt.expectedResult[i].ID {
+ t.Errorf("expected channel %v, but got %v", tt.expectedResult[i], c)
+ }
+ }
+ })
+ }
+}
+
+func formatSecondsAnd6DigitsMiliseconds(t time.Time) string {
+ n := float64(t.UnixMicro()) / float64(time.Millisecond)
+ return strconv.FormatFloat(n, 'f', 6, 64)
+}
+
+const (
+ noLimit = 0
+)
+
+func TestIsMessageOutOfRange(t *testing.T) {
+ tests := []struct {
+ name string
+ message slack.Message
+ backwardDuration time.Duration
+ currentMessagesCount int
+ limitMessagesCount int
+ expectedOutOfRange bool
+ }{
+ {
+ name: "message is within range",
+ message: slack.Message{
+ Msg: slack.Msg{
+ Timestamp: formatSecondsAnd6DigitsMiliseconds(timeNow),
+ },
+ },
+ backwardDuration: time.Minute,
+ currentMessagesCount: 0,
+ limitMessagesCount: noLimit,
+ expectedOutOfRange: false,
+ },
+ {
+ name: "message is out of range due to backward duration",
+ message: slack.Message{
+ Msg: slack.Msg{
+ Timestamp: formatSecondsAnd6DigitsMiliseconds(timeNow.Add(-time.Minute * 2)),
+ },
+ },
+ backwardDuration: time.Minute,
+ currentMessagesCount: 0,
+ limitMessagesCount: noLimit,
+ expectedOutOfRange: true,
+ },
+ {
+ name: "message is out of range due to message count limit",
+ message: slack.Message{
+ Msg: slack.Msg{
+ Timestamp: formatSecondsAnd6DigitsMiliseconds(timeNow),
+ },
+ },
+ backwardDuration: noLimit,
+ currentMessagesCount: 1,
+ limitMessagesCount: 1,
+ expectedOutOfRange: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ outOfRange, err := isMessageOutOfRange(tt.message, tt.backwardDuration, tt.currentMessagesCount, tt.limitMessagesCount)
+ if err != nil {
+ t.Errorf("unexpected error: %v", err)
+ }
+ if outOfRange != tt.expectedOutOfRange {
+ t.Errorf("expected outOfRange to be %v, but got %v", tt.expectedOutOfRange, outOfRange)
+ }
+ })
+ }
+}
+
+func TestGetTeam(t *testing.T) {
+ tests := []struct {
+ name string
+ teamNameToSearch string
+ mockResponses []ListTeamsResponse
+ expectedTeam *slack.Team
+ expectedErrSubstr string
+ }{
+ {
+ name: "ListTeams returns error",
+ teamNameToSearch: "AnyTeam",
+ mockResponses: []ListTeamsResponse{
+ {
+ Teams: nil,
+ Cursor: "",
+ Err: errors.New("some error"),
+ },
+ },
+ expectedTeam: nil,
+ expectedErrSubstr: "error while getting teams",
+ },
+ {
+ name: "Team found by Name on first page",
+ teamNameToSearch: "TeamA",
+ mockResponses: []ListTeamsResponse{
+ {
+ Teams: []slack.Team{
+ {ID: "2", Name: "OtherTeam"},
+ {ID: "1", Name: "TeamA"},
+ },
+ Cursor: "",
+ Err: nil,
+ },
+ },
+ expectedTeam: &slack.Team{ID: "1", Name: "TeamA"},
+ expectedErrSubstr: "",
+ },
+ {
+ name: "Team found by ID on first page",
+ teamNameToSearch: "TeamB",
+ mockResponses: []ListTeamsResponse{
+ {
+ Teams: []slack.Team{
+ {ID: "1", Name: "OtherTeam"},
+ {ID: "TeamB", Name: "SomeTeam"},
+ },
+ Cursor: "",
+ Err: nil,
+ },
+ },
+ expectedTeam: &slack.Team{ID: "TeamB", Name: "SomeTeam"},
+ expectedErrSubstr: "",
+ },
+ {
+ name: "Team found in second page",
+ teamNameToSearch: "TeamC",
+ mockResponses: []ListTeamsResponse{
+ {
+ Teams: []slack.Team{
+ {ID: "1", Name: "OtherTeam"},
+ },
+ Cursor: "cursor1",
+ Err: nil,
+ },
+ {
+ Teams: []slack.Team{
+ {ID: "3", Name: "TeamC"},
+ },
+ Cursor: "",
+ Err: nil,
+ },
+ },
+ expectedTeam: &slack.Team{ID: "3", Name: "TeamC"},
+ expectedErrSubstr: "",
+ },
+ {
+ name: "Team not found",
+ teamNameToSearch: "TeamNotFound",
+ mockResponses: []ListTeamsResponse{
+ {
+ Teams: []slack.Team{
+ {ID: "1", Name: "OtherTeam1"},
+ },
+ Cursor: "cursor1",
+ Err: nil,
+ },
+ {
+ Teams: []slack.Team{
+ {ID: "2", Name: "OtherTeam2"},
+ },
+ Cursor: "",
+ Err: nil,
+ },
+ },
+ expectedTeam: nil,
+ expectedErrSubstr: "team 'TeamNotFound' not found",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ client := &mockSlackClient{
+ listTeamsResponses: tt.mockResponses,
+ }
+
+ team, err := getTeam(client, tt.teamNameToSearch)
+ if tt.expectedTeam != nil {
+ assert.NoError(t, err)
+ assert.NotNil(t, team)
+ assert.Equal(t, *tt.expectedTeam, *team)
+ } else {
+ assert.Nil(t, team)
+ assert.Error(t, err)
+ assert.Contains(t, err.Error(), tt.expectedErrSubstr)
+ }
+ })
+ }
+}
diff --git a/tests/e2e.go b/tests/e2e.go
index 7af45f1f..9c51f82f 100644
--- a/tests/e2e.go
+++ b/tests/e2e.go
@@ -1,79 +1,79 @@
-package tests
-
-// TODO: add confluence test
-
-import (
- "encoding/json"
- "fmt"
- "go/build"
- "os"
- "os/exec"
- "path"
- "runtime"
-
- "github.com/checkmarx/2ms/lib/reporting"
-)
-
-type cli struct {
- executable string
- resultsPath string
-}
-
-func createCLI(outputDir string) (cli, error) {
- executable := path.Join(outputDir, "2ms")
- lib, err := build.Import("github.com/checkmarx/2ms", "", build.FindOnly)
- if err != nil {
- return cli{}, fmt.Errorf("failed to import 2ms: %s", err)
- }
-
- cmd := exec.Command("go", "build", "-o", executable, lib.ImportPath)
- cmd.Env = append(os.Environ(), fmt.Sprintf("GOOS=%s", runtime.GOOS), fmt.Sprintf("GOARCH=%s", runtime.GOARCH))
-
- cmd.Stdout = os.Stdout
- cmd.Stderr = os.Stderr
-
- if err := cmd.Run(); err != nil {
- return cli{}, fmt.Errorf("failed to build 2ms: %s", err)
- }
-
- return cli{
- executable: executable,
- resultsPath: path.Join(outputDir, "results.json"),
- },
- nil
-}
-
-func generateFileWithSecret(outputDir string, filename string) error {
- token := "g" + "hp" + "_ixOl" + "iEFNK4O" + "brYB506" + "8oXFd" + "9JUF" + "iRy0RU" + "KNl"
- content := "bla bla bla\nGitHubToken: " + token + "\nbla bla bla"
-
- if err := os.WriteFile(path.Join(outputDir, filename), []byte(content), 0644); err != nil {
- return err
- }
-
- return nil
-}
-
-func (c *cli) run(command string, args ...string) error {
- argsWithDefault := append([]string{command}, args...)
- argsWithDefault = append(argsWithDefault, "--report-path", c.resultsPath)
-
- cmd := exec.Command(c.executable, argsWithDefault...)
- cmd.Stdout = os.Stdout
- cmd.Stderr = os.Stderr
- return cmd.Run()
-}
-
-func (c *cli) getReport() (reporting.Report, error) {
- report := reporting.Init()
-
- content, err := os.ReadFile(c.resultsPath)
- if err != nil {
- return reporting.Report{}, err
- }
- if err := json.Unmarshal(content, &report); err != nil {
- return reporting.Report{}, err
- }
-
- return *report, nil
-}
+package tests
+
+// TODO: add confluence test
+
+import (
+ "encoding/json"
+ "fmt"
+ "go/build"
+ "os"
+ "os/exec"
+ "path"
+ "runtime"
+
+ "github.com/checkmarx/2ms/lib/reporting"
+)
+
+type cli struct {
+ executable string
+ resultsPath string
+}
+
+func createCLI(outputDir string) (cli, error) {
+ executable := path.Join(outputDir, "2ms")
+ lib, err := build.Import("github.com/checkmarx/2ms", "", build.FindOnly)
+ if err != nil {
+ return cli{}, fmt.Errorf("failed to import 2ms: %s", err)
+ }
+
+ cmd := exec.Command("go", "build", "-o", executable, lib.ImportPath)
+ cmd.Env = append(os.Environ(), fmt.Sprintf("GOOS=%s", runtime.GOOS), fmt.Sprintf("GOARCH=%s", runtime.GOARCH))
+
+ cmd.Stdout = os.Stdout
+ cmd.Stderr = os.Stderr
+
+ if err := cmd.Run(); err != nil {
+ return cli{}, fmt.Errorf("failed to build 2ms: %s", err)
+ }
+
+ return cli{
+ executable: executable,
+ resultsPath: path.Join(outputDir, "results.json"),
+ },
+ nil
+}
+
+func generateFileWithSecret(outputDir string, filename string) error {
+ token := "g" + "hp" + "_ixOl" + "iEFNK4O" + "brYB506" + "8oXFd" + "9JUF" + "iRy0RU" + "KNl"
+ content := "bla bla bla\nGitHubToken: " + token + "\nbla bla bla"
+
+ if err := os.WriteFile(path.Join(outputDir, filename), []byte(content), 0644); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (c *cli) run(command string, args ...string) error {
+ argsWithDefault := append([]string{command}, args...)
+ argsWithDefault = append(argsWithDefault, "--report-path", c.resultsPath)
+
+ cmd := exec.Command(c.executable, argsWithDefault...)
+ cmd.Stdout = os.Stdout
+ cmd.Stderr = os.Stderr
+ return cmd.Run()
+}
+
+func (c *cli) getReport() (reporting.Report, error) {
+ report := reporting.Init()
+
+ content, err := os.ReadFile(c.resultsPath)
+ if err != nil {
+ return reporting.Report{}, err
+ }
+ if err := json.Unmarshal(content, &report); err != nil {
+ return reporting.Report{}, err
+ }
+
+ return *report, nil
+}
diff --git a/tests/lint.go b/tests/lint.go
index 65e43ffc..80492de3 100644
--- a/tests/lint.go
+++ b/tests/lint.go
@@ -1,88 +1,88 @@
-package tests
-
-import (
- "bufio"
- "fmt"
- "os"
- "path/filepath"
- "regexp"
-)
-
-var ignoreComment = regexp.MustCompile(`//\s*lint:ignore`)
-
-func walkGoFiles() <-chan string {
- ignoredDirs := []string{
- ".git",
- ".github",
- ".vscode",
- "vendor",
- "tests",
- ".ci",
- }
-
- ch := make(chan string)
-
- go func() {
- defer close(ch)
- err := filepath.Walk("..", func(path string, info os.FileInfo, err error) error {
- if err != nil {
- return err
- }
- if filepath.Ext(path) == ".go" {
- ch <- path
- }
-
- if info.IsDir() {
- for _, ignoredDir := range ignoredDirs {
- if info.Name() == ignoredDir {
- return filepath.SkipDir
- }
- }
- }
- return nil
- })
-
- if err != nil {
- panic(err)
- }
- }()
-
- return ch
-}
-
-var forbiddenPatterns = []*regexp.Regexp{
- regexp.MustCompile(`fmt\.Print`),
- regexp.MustCompile(`log\.Fatal\(\)`),
-}
-
-var ignoreFiles = regexp.MustCompile(`_test\.go$`)
-
-func lintFile(path string) error {
- if ignoreFiles.MatchString(path) {
- return nil
- }
-
- file, err := os.Open(path)
- if err != nil {
- return err
- }
- defer file.Close()
-
- scanner := bufio.NewScanner(file)
- line := 1
- for scanner.Scan() {
- lineText := scanner.Text()
- for _, forbiddenPattern := range forbiddenPatterns {
- if forbiddenPattern.MatchString(lineText) && !ignoreComment.MatchString(lineText) {
- return fmt.Errorf("%s:%d: forbidden pattern found: %s", path, line, forbiddenPattern.String())
- }
- }
- line++
- }
-
- if err := scanner.Err(); err != nil {
- return err
- }
-
- return nil
-}
+package tests
+
+import (
+ "bufio"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+)
+
+var ignoreComment = regexp.MustCompile(`//\s*lint:ignore`)
+
+func walkGoFiles() <-chan string {
+ ignoredDirs := []string{
+ ".git",
+ ".github",
+ ".vscode",
+ "vendor",
+ "tests",
+ ".ci",
+ }
+
+ ch := make(chan string)
+
+ go func() {
+ defer close(ch)
+ err := filepath.Walk("..", func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if filepath.Ext(path) == ".go" {
+ ch <- path
+ }
+
+ if info.IsDir() {
+ for _, ignoredDir := range ignoredDirs {
+ if info.Name() == ignoredDir {
+ return filepath.SkipDir
+ }
+ }
+ }
+ return nil
+ })
+
+ if err != nil {
+ panic(err)
+ }
+ }()
+
+ return ch
+}
+
+var forbiddenPatterns = []*regexp.Regexp{
+ regexp.MustCompile(`fmt\.Print`),
+ regexp.MustCompile(`log\.Fatal\(\)`),
+}
+
+var ignoreFiles = regexp.MustCompile(`_test\.go$`)
+
+func lintFile(path string) error {
+ if ignoreFiles.MatchString(path) {
+ return nil
+ }
+
+ file, err := os.Open(path)
+ if err != nil {
+ return err
+ }
+ defer file.Close()
+
+ scanner := bufio.NewScanner(file)
+ line := 1
+ for scanner.Scan() {
+ lineText := scanner.Text()
+ for _, forbiddenPattern := range forbiddenPatterns {
+ if forbiddenPattern.MatchString(lineText) && !ignoreComment.MatchString(lineText) {
+ return fmt.Errorf("%s:%d: forbidden pattern found: %s", path, line, forbiddenPattern.String())
+ }
+ }
+ line++
+ }
+
+ if err := scanner.Err(); err != nil {
+ return err
+ }
+
+ return nil
+}
diff --git a/tests/lint_test.go b/tests/lint_test.go
index 6f3a385e..8616ec9b 100644
--- a/tests/lint_test.go
+++ b/tests/lint_test.go
@@ -1,17 +1,17 @@
-package tests
-
-import (
- "testing"
-)
-
-func TestLintIntegration(t *testing.T) {
- if testing.Short() {
- t.Skip("skipping integration test")
- }
-
- for path := range walkGoFiles() {
- if err := lintFile(path); err != nil {
- t.Errorf("lint error: %s", err)
- }
- }
-}
+package tests
+
+import (
+ "testing"
+)
+
+func TestLintIntegration(t *testing.T) {
+ if testing.Short() {
+ t.Skip("skipping integration test")
+ }
+
+ for path := range walkGoFiles() {
+ if err := lintFile(path); err != nil {
+ t.Errorf("lint error: %s", err)
+ }
+ }
+}
diff --git a/trivy.yaml b/trivy.yaml
index 34d4fe15..65086a48 100644
--- a/trivy.yaml
+++ b/trivy.yaml
@@ -1,3 +1,3 @@
-vulnerability:
- vex:
- - ignore.openvex
+vulnerability:
+ vex:
+ - ignore.openvex