Merge pull request #220 from HackTricks-wiki/update_Skimming_Credenti… #322
Workflow file for this run

name: Build Master
on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
      - 'book/**'
  workflow_dispatch:
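# Only one run of this workflow is in progress at a time; a run queued while
# another is still pending supersedes the pending one.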
concurrency: build_master
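# id-token: write lets the job request a GitHub OIDC token, which the
# "Configure AWS credentials using OIDC" step below exchanges for temporary AWS credentials.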
permissions:
  packages: write
  id-token: write
  contents: write
jobs:
  run-translation:
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/hacktricks-wiki/hacktricks-cloud/translator-image:latest
    environment: prod
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches
      # Build the mdBook
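      # MDBOOK_BOOK__LANGUAGE=en overrides book.language from book.toml for this build
      # (mdBook reads MDBOOK_* environment variables as configuration overrides).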
      - name: Build mdBook
        run: MDBOOK_BOOK__LANGUAGE=en mdbook build || (echo "Error logs" && cat hacktricks-preprocessor-error.log && echo "" && echo "" && echo "Debug logs" && tail -n 20 hacktricks-preprocessor.log && exit 1)
      - name: Push search index to hacktricks-searchindex repo
        shell: bash
        env:
          PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
        run: |
          set -euo pipefail
          ASSET="book/searchindex.js"
          TARGET_REPO="HackTricks-wiki/hacktricks-searchindex"
          FILENAME="searchindex-cloud-en.js"
          if [ ! -f "$ASSET" ]; then
            echo "Expected $ASSET to exist after build" >&2
            exit 1
          fi
          TOKEN="${PAT_TOKEN}"
          if [ -z "$TOKEN" ]; then
            echo "No PAT_TOKEN available" >&2
            exit 1
          fi
          # Clone the searchindex repo
          git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo
          cd /tmp/searchindex-repo
          git config user.name "GitHub Actions"
          git config user.email "[email protected]"
          # Save all current files from main branch to temp directory
          mkdir -p /tmp/searchindex-backup
          cp -r * /tmp/searchindex-backup/ 2>/dev/null || true
          # Create a fresh orphan branch (no history)
          git checkout --orphan new-main
          # Remove all files from git index (but keep working directory)
          git rm -rf . 2>/dev/null || true
          # Restore all the files from backup (keeps all language files)
          cp -r /tmp/searchindex-backup/* . 2>/dev/null || true
          # Now update/add our English searchindex file
          # First, compress the original file (in the build directory)
          cd "${GITHUB_WORKSPACE}"
          gzip -9 -k -f "$ASSET"
          # Show compression stats
          ORIGINAL_SIZE=$(wc -c < "$ASSET")
          COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz")
          RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}")
          echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)"
          # XOR encrypt the compressed file
          KEY='Prevent_Online_AVs_From_Flagging_HackTricks_Search_Gzip_As_Malicious_394h7gt8rf9u3rf9g'
          cat > /tmp/xor_encrypt.py << 'EOF'
          import sys
          key = sys.argv[1]
          input_file = sys.argv[2]
          output_file = sys.argv[3]
          with open(input_file, 'rb') as f:
              data = f.read()
          key_bytes = key.encode('utf-8')
          encrypted = bytearray(len(data))
          for i in range(len(data)):
              encrypted[i] = data[i] ^ key_bytes[i % len(key_bytes)]
          with open(output_file, 'wb') as f:
              f.write(encrypted)
          print(f"Encrypted: {len(data)} bytes")
          EOF
          python3 /tmp/xor_encrypt.py "$KEY" "${ASSET}.gz" "${ASSET}.gz.enc"
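          # XOR with a repeating key is symmetric, so running the same script with the same key over the .enc file reproduces the .gz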
          # Copy ONLY the encrypted .gz version to the searchindex repo (no uncompressed .js)
          cd /tmp/searchindex-repo
          cp "${GITHUB_WORKSPACE}/${ASSET}.gz.enc" "${FILENAME}.gz"
          # Stage all files
          git add -A
          # Commit with timestamp
          TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
          git commit -m "Update searchindex files - ${TIMESTAMP}" --allow-empty
          # Force push to replace master branch (deletes history, keeps all files)
          git push -f origin new-main:master
          echo "Successfully reset repository history and pushed all searchindex files"
      # Log in to AWS
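      # configure-aws-credentials exchanges the job's OIDC token for short-lived credentials
      # for the role in AWS_ROLE_ARN, so no long-lived AWS keys need to be stored as secrets.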
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1
      # Sync the build to S3
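      # --delete removes objects under s3://hacktricks-cloud/en that are no longer present in ./book,
      # so the bucket mirrors the freshly built site.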
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/en --delete