Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
136 changes: 136 additions & 0 deletions .github/workflows/bucket-upload.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
# Benchmark workflow: on every PR, build 2ms at the PR's commit, scan a fixed
# list of repositories, upload SARIF results + metadata to S3, and post a
# per-repo summary table as a PR comment.
on:
  pull_request:

jobs:
  bucket-upload-S3:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
        with:
          ref: ${{ github.event.pull_request.head.sha }}

      - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
        with:
          go-version: "^1.22"

      # Build the 2ms binary from the exact commit under review.
      - name: Clone 2ms Repository and Checkout Commit SHA
        run: |
          git clone https://github.com/checkmarx/2ms.git $GITHUB_WORKSPACE/2ms
          cd $GITHUB_WORKSPACE/2ms
          git fetch --all
          git checkout ${{ github.event.pull_request.head.sha }}
          go build -o $GITHUB_WORKSPACE/2ms/dist/2ms main.go
          chmod +x $GITHUB_WORKSPACE/2ms/dist/2ms

      # The benchmark corpus is defined in an external repos.json file.
      - name: Load Repos from JSON and Clone Each Repo
        run: |
          curl -o /tmp/repos.json https://raw.githubusercontent.com/cx-miguel-neiva/2ms-github-action/main/repos.json
          REPOS_LIST=$(jq -r '.projects[]' /tmp/repos.json | tr '\n' ' ')
          echo "repos=$REPOS_LIST" >> $GITHUB_ENV
          for repo_url in $REPOS_LIST; do
            repo_name=$(basename "$repo_url" .git)
            mkdir -p "$GITHUB_WORKSPACE/repos/$repo_name"
            git clone "$repo_url" "$GITHUB_WORKSPACE/repos/$repo_name"
          done

      # Scan each cloned repo; record status and wall-clock time as a JSON array.
      - name: Run 2ms Scan for each repo
        run: |
          mkdir -p $GITHUB_WORKSPACE/results
          IFS=' ' read -r -a REPOS_ARRAY <<< "$repos"
          touch $GITHUB_WORKSPACE/scan_results.json
          echo "[" > $GITHUB_WORKSPACE/scan_results.json
          for repo_url in "${REPOS_ARRAY[@]}"; do
            repo_name=$(basename "$repo_url" .git)
            result_sarif="$GITHUB_WORKSPACE/results/$repo_name.sarif"
            start_time=$(date +%s.%N)
            if $GITHUB_WORKSPACE/2ms/dist/2ms filesystem --path "$GITHUB_WORKSPACE/repos/$repo_name" --ignore-on-exit results --report-path "$result_sarif"; then
              scan_status="success"
            else
              scan_status="failure"
            fi
            end_time=$(date +%s.%N)
            execution_time=$(echo "$end_time - $start_time" | bc)
            execution_time_formatted=$(printf "%.2f" "$execution_time")
            echo "{
              \"repo_name\": \"$repo_name\",
              \"scan_status\": \"$scan_status\",
              \"execution_time\": \"$execution_time_formatted\"
            }," >> $GITHUB_WORKSPACE/scan_results.json
          done
          # Strip the trailing comma from the last object so the array is valid JSON.
          sed -i '$ s/,$//' $GITHUB_WORKSPACE/scan_results.json
          echo "]" >> $GITHUB_WORKSPACE/scan_results.json
          cp -r $GITHUB_WORKSPACE/results $GITHUB_WORKSPACE/results_backup

      - name: Get Results Directory
        id: get_results_dir
        run: |
          echo "results_dir=results" >> $GITHUB_ENV

      - name: Get 2ms Version
        id: get_twoms_version
        run: |
          echo "twoms_version=$(curl -s https://api.github.com/repos/checkmarx/2ms/releases/latest | jq -r '.tag_name')" >> $GITHUB_ENV

      # Build the S3 key prefix: <engine>/<target>/<branch>/<version>/pr-<n>
      - name: Set S3 Destination Path
        id: set_s3_path
        run: |
          BRANCH_NAME="${{ github.head_ref || github.ref_name }}"
          PR_NUMBER="${{ github.event.number }}"
          ENGINE="2ms"
          COMMIT_HASH="${{ github.sha }}"
          PR_OWNER="${{ github.actor }}"
          TARGET_BRANCH="master"
          DEST_DIR="${ENGINE}/${TARGET_BRANCH}/${BRANCH_NAME}/${{ env.twoms_version }}/pr-${PR_NUMBER}"
          echo "destination_dir=$DEST_DIR" >> $GITHUB_ENV
          echo "results_dir=${{ env.results_dir }}" >> $GITHUB_ENV

      # Rearrange flat <repo>.sarif files into pr-<n>/<repo>/results.sarif.
      - name: Organize SARIF files
        run: |
          mkdir -p "${{ env.results_dir }}/pr-${{ github.event.number }}"
          for sarif_file in $GITHUB_WORKSPACE/results/*.sarif; do
            if [[ -f "$sarif_file" ]]; then
              project_name=$(basename "$sarif_file" .sarif)
              mkdir -p "${{ env.results_dir }}/pr-${{ github.event.number }}/$project_name"
              mv "$sarif_file" "${{ env.results_dir }}/pr-${{ github.event.number }}/$project_name/results.sarif"
            fi
          done

      # NOTE(review): the PR title is interpolated straight into a JSON string
      # (and into the shell); a title containing quotes or backticks will break
      # the file or inject shell — consider building this with `jq -n --arg`.
      - name: Create Metadata File
        run: |
          COMMIT_TIMESTAMP=$(git log -1 --format=%ct)
          METADATA_PATH="${{ env.results_dir }}/pr-${{ github.event.number }}/metadata.json"
          echo '{
            "seq": "'"${COMMIT_TIMESTAMP}"'",
            "tag": "'"${{ github.event.number }}"'",
            "comment": "'"${{ github.event.pull_request.title }}"'",
            "commit": "'"${{ github.sha }}"'",
            "owner": "'"${{ github.actor }}"'",
            "branch": "'"${{ github.head_ref || github.ref_name }}"'",
            "engine": "2ms",
            "version": "'"${{ env.twoms_version }}"'"
          }' > "$METADATA_PATH"

      - name: Upload results to S3
        run: |
          aws s3 cp --recursive "${{ env.results_dir }}/pr-${{ github.event.number }}" "s3://${{ secrets.CES_AWS_BUCKET }}/${{ env.destination_dir }}" \
            --storage-class STANDARD
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CES_BUCKET_AWS_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CES_BUCKET_AWS_SECRET_ACCESS_KEY }}

      # Render scan_results.json as a markdown table and export it via a
      # multi-line GITHUB_ENV heredoc for the comment step below.
      - name: Get Scan Results for Comment
        id: scan_results
        run: |
          echo "| Repository | Status | Execution Time (seconds) |" > $GITHUB_WORKSPACE/scan_results_table.md
          echo "|------------|--------|--------------------------|" >> $GITHUB_WORKSPACE/scan_results_table.md
          jq -r '
            .[] |
            "| \(.repo_name) | " +
            (if .scan_status == "success" then "✅" else "❌" end) +
            " | \(.execution_time) |"' $GITHUB_WORKSPACE/scan_results.json >> $GITHUB_WORKSPACE/scan_results_table.md
          echo "SCAN_RESULTS<<EOF" >> $GITHUB_ENV
          cat $GITHUB_WORKSPACE/scan_results_table.md >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV

      - name: Create PR Comment with Job Summary in Table
        # NOTE(review): KICS flags this as an unpinned action (MEDIUM). Pin to a
        # full-length commit SHA like the checkout/setup-go steps above — verify
        # the SHA against the action's own repository before pinning.
        uses: peter-evans/create-or-update-comment@v2
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body: |
            ## 🛠 Scan Summary


            ${{ env.SCAN_RESULTS }}

2 changes: 2 additions & 0 deletions .github/workflows/new-rules.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ on:
schedule:
- cron: "0 2 * * 6" # At 02:00 on Saturday

#New Branch

jobs:
update_secrets:
runs-on: ubuntu-latest
Expand Down
12 changes: 8 additions & 4 deletions engine/engine.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,15 @@
import (
"crypto/sha1"
"fmt"
"github.com/checkmarx/2ms/engine/linecontent"
"github.com/checkmarx/2ms/engine/score"
"os"
"regexp"
"strings"
"sync"
"text/tabwriter"

"github.com/checkmarx/2ms/engine/linecontent"
"github.com/checkmarx/2ms/engine/score"

"github.com/checkmarx/2ms/engine/rules"
"github.com/checkmarx/2ms/engine/validation"
"github.com/checkmarx/2ms/lib/secrets"
Expand Down Expand Up @@ -47,20 +48,23 @@

func Init(engineConfig EngineConfig) (*Engine, error) {
selectedRules := rules.FilterRules(engineConfig.SelectedList, engineConfig.IgnoreList, engineConfig.SpecialList)

if len(*selectedRules) == 0 {
return nil, fmt.Errorf("no rules were selected")
}

rulesToBeApplied := make(map[string]config.Rule)
rulesBaseRiskScore := make(map[string]float64)
keywords := []string{}
keywords := map[string]struct{}{}

for _, rule := range *selectedRules {
rulesToBeApplied[rule.Rule.RuleID] = rule.Rule
rulesBaseRiskScore[rule.Rule.RuleID] = score.GetBaseRiskScore(rule.ScoreParameters.Category, rule.ScoreParameters.RuleType)
for _, keyword := range rule.Rule.Keywords {
keywords = append(keywords, strings.ToLower(keyword))
keywords[strings.ToLower(keyword)] = struct{}{}
}
}

cfg.Rules = rulesToBeApplied
cfg.Keywords = keywords

Expand All @@ -70,7 +74,7 @@
return &Engine{
rules: rulesToBeApplied,
rulesBaseRiskScore: rulesBaseRiskScore,
detector: *detector,

Check failure on line 77 in engine/engine.go

View workflow job for this annotation

GitHub Actions / test (ubuntu-latest)

copylocks: literal copies lock value from *detector: github.com/zricethezav/gitleaks/v8/detect.Detector contains sync/atomic.Uint64 contains sync/atomic.noCopy (govet)
validator: *validation.NewValidator(),

ignoredIds: engineConfig.IgnoredIds,
Expand Down
6 changes: 4 additions & 2 deletions engine/rules/authenticated_url.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,10 @@ func AuthenticatedURL() *config.Rule {
Regex: regex,
Keywords: []string{"://"},
SecretGroup: 1,
Allowlist: config.Allowlist{
StopWords: []string{"password", "pass"},
Allowlists: []config.Allowlist{
{
StopWords: []string{"password", "pass"},
},
},
}

Expand Down
6 changes: 4 additions & 2 deletions engine/rules/hardcodedPassword.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,10 @@ func HardcodedPassword() *config.Rule {
},
Entropy: 0,
SecretGroup: 1,
Allowlist: config.Allowlist{
StopWords: rules.DefaultStopWords,
Allowlists: []config.Allowlist{
{
StopWords: rules.DefaultStopWords,
},
},
}

Expand Down
8 changes: 4 additions & 4 deletions engine/rules/privateKey.go
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
package rules

import (
"github.com/zricethezav/gitleaks/v8/config"
"regexp"

"github.com/zricethezav/gitleaks/v8/config"
)

func PrivateKey() *config.Rule {
Expand All @@ -19,9 +20,8 @@ func PrivateKey() *config.Rule {
anything
-----END PRIVATE KEY-----`,
`-----BEGIN RSA PRIVATE KEY-----
abcdefghijklmnopqrstuvwxyz
-----END RSA PRIVATE KEY-----
`,
abcdefghijklmnopqrstuvwxz
-----END RSA PRIVATE KEY-----`,
`-----BEGIN PRIVATE KEY BLOCK-----
anything
-----END PRIVATE KEY BLOCK-----`,
Expand Down
72 changes: 45 additions & 27 deletions engine/rules/rule.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@ package rules
import (
"strings"

"github.com/rs/zerolog/log"
"github.com/zricethezav/gitleaks/v8/cmd/generate/config/base"
"github.com/zricethezav/gitleaks/v8/logging"

"github.com/zricethezav/gitleaks/v8/config"
"github.com/zricethezav/gitleaks/v8/detect"
)
Expand All @@ -20,37 +22,53 @@ type Rule struct {
}

// Copied from https://github.com/gitleaks/gitleaks/blob/463d24618fa42fc7629dc30c9744ebe36c5df1ab/cmd/generate/config/rules/rule.go
func validate(r config.Rule, truePositives []string, falsePositives []string) *config.Rule {
// normalize keywords like in the config package
var keywords []string
for _, k := range r.Keywords {
keywords = append(keywords, strings.ToLower(k))
}
r.Keywords = keywords
func validate(rule config.Rule, truePositives []string, falsePositives []string) *config.Rule {
r := &rule
d := createSingleRuleDetector(r)

rules := make(map[string]config.Rule)
rules[r.RuleID] = r
d := detect.NewDetector(config.Config{
Rules: rules,
Keywords: keywords,
})
for _, tp := range truePositives {
if len(d.DetectString(tp)) != 1 {
log.Fatal(). // lint:ignore This Fatal happens in a test
Str("rule", r.RuleID).
Str("value", tp).
Str("regex", r.Regex.String()).
Msg("Failed to Validate. True positive was not detected by regex.")
if len(d.DetectString(tp)) < 1 {
logging.Fatal().
Str("rule", r.RuleID).
Str("value", tp).
Str("regex", r.Regex.String()).
Msg("Failed to Validate. True positive was not detected by regex.")
}
}
for _, fp := range falsePositives {
if len(d.DetectString(fp)) != 0 {
log.Fatal(). // lint:ignore This Fatal happens in a test
Str("rule", r.RuleID).
Str("value", fp).
Str("regex", r.Regex.String()).
Msg("Failed to Validate. False positive was detected by regex.")
findings := d.DetectString(fp)
if len(findings) != 0 {
logging.Fatal().
Str("rule", r.RuleID).
Str("value", fp).
Str("regex", r.Regex.String()).
Msg("Failed to Validate. False positive was detected by regex.")
}
}
return r
}

func createSingleRuleDetector(r *config.Rule) *detect.Detector {
// normalize keywords like in the config package
var (
uniqueKeywords = make(map[string]struct{})
keywords []string
)
for _, keyword := range r.Keywords {
k := strings.ToLower(keyword)
if _, ok := uniqueKeywords[k]; ok {
continue
}
keywords = append(keywords, k)
uniqueKeywords[k] = struct{}{}
}
r.Keywords = keywords

rules := map[string]config.Rule{
r.RuleID: *r,
}
return &r
cfg := base.CreateGlobalConfig()
cfg.Rules = rules
cfg.Keywords = uniqueKeywords
return detect.NewDetector(cfg)
}
Loading
Loading