Include additional files with releases #506

Workflow file for this run
name: Bot Reply on Mention

on:
  issue_comment:
    types: [created]

jobs:
  continuous-reply:
    # Skip if comment author is a bot (workflow will show as "skipped")
    if: |
      github.event.comment.user.login != 'mirrobot' &&
      github.event.comment.user.login != 'mirrobot-agent' &&
      github.event.comment.user.login != 'mirrobot-agent[bot]' &&
      (contains(github.event.comment.body, '@mirrobot') ||
       contains(github.event.comment.body, '@mirrobot-agent'))
    runs-on: ubuntu-latest
    permissions:
      contents: write
      issues: write
      pull-requests: write
    env:
      THREAD_NUMBER: ${{ github.event.issue.number }}
      BOT_NAMES_JSON: '["mirrobot", "mirrobot-agent", "mirrobot-agent[bot]"]'
      IGNORE_BOT_NAMES_JSON: '["ellipsis-dev"]'
      COMMENT_FETCH_LIMIT: '20'
      REVIEW_FETCH_LIMIT: '15'
      REVIEW_THREAD_FETCH_LIMIT: '20'
      THREAD_COMMENT_FETCH_LIMIT: '5'
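      # The *_FETCH_LIMIT values cap how many comments, reviews, review threads, and
      # per-thread comments the GraphQL context query below pulls into the prompt.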
    steps:
      # ========================================================================
      # COMMENT VALIDATION STEP
      # ========================================================================
      # Validates that trigger words are in actual content (not in quotes/code)
      # If validation fails, all subsequent steps are skipped
      # ========================================================================
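      # Example (illustrative): a comment saying "@mirrobot please take a look" proceeds,
      # while one that only mentions the bot in a quoted line ("> @mirrobot said ...")
      # or inside backticks/code fences is skipped.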
      - name: Validate comment trigger
        id: validate
        env:
          COMMENT_BODY: ${{ github.event.comment.body }}
        run: |
          set -e
          # Save comment to temp file for processing
          TEMP_FILE=$(mktemp)
          echo "$COMMENT_BODY" > "$TEMP_FILE"
          # Remove fenced code blocks (```...```)
          CLEAN_BODY=$(awk '
            /^```/ { in_code = !in_code; next }
            !in_code { print }
          ' "$TEMP_FILE")
          # Remove inline code (`...`)
          CLEAN_BODY=$(echo "$CLEAN_BODY" | sed 's/`[^`]*`//g')
          # Remove quoted lines (lines starting with >)
          CLEAN_BODY=$(echo "$CLEAN_BODY" | grep -v '^[[:space:]]*>' || true)
          rm -f "$TEMP_FILE"
          echo "Clean body after stripping quotes/code:"
          echo "$CLEAN_BODY"
          echo "---"
          # Check for trigger words in clean text
          # Trigger: @mirrobot or @mirrobot-agent
          if echo "$CLEAN_BODY" | grep -qE '@mirrobot(-agent)?'; then
            echo "::notice::Valid trigger found in non-quoted, non-code text."
            echo "should_proceed=true" >> $GITHUB_OUTPUT
          else
            echo "::notice::Trigger only found in quotes/code blocks. Skipping."
            echo "should_proceed=false" >> $GITHUB_OUTPUT
          fi

      - name: Checkout repository
        if: steps.validate.outputs.should_proceed == 'true'
        uses: actions/checkout@v4

      - name: Bot Setup
        if: steps.validate.outputs.should_proceed == 'true'
        id: setup
        uses: ./.github/actions/bot-setup
        with:
          bot-app-id: ${{ secrets.BOT_APP_ID }}
          bot-private-key: ${{ secrets.BOT_PRIVATE_KEY }}
          opencode-api-key: ${{ secrets.OPENCODE_API_KEY }}
          opencode-model: ${{ secrets.OPENCODE_MODEL }}
          opencode-fast-model: ${{ secrets.OPENCODE_FAST_MODEL }}
          custom-providers-json: ${{ secrets.CUSTOM_PROVIDERS_JSON }}

      - name: Add reaction to comment
        if: steps.validate.outputs.should_proceed == 'true'
        env:
          GH_TOKEN: ${{ steps.setup.outputs.token }}
        run: |
          gh api \
            --method POST \
            -H "Accept: application/vnd.github+json" \
            /repos/${{ github.repository }}/issues/comments/${{ github.event.comment.id }}/reactions \
            -f content='eyes'
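          # 'eyes' is one of the reaction types accepted by the Reactions API
          # (+1, -1, laugh, confused, heart, hooray, rocket, eyes).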
      - name: Gather Full Thread Context
        if: steps.validate.outputs.should_proceed == 'true'
        id: context
        env:
          GH_TOKEN: ${{ steps.setup.outputs.token }}
          BOT_NAMES_JSON: ${{ env.BOT_NAMES_JSON }}
          IGNORE_BOT_NAMES_JSON: ${{ env.IGNORE_BOT_NAMES_JSON }}
        run: |
          # Common Info
          echo "NEW_COMMENT_AUTHOR=${{ github.event.comment.user.login }}" >> $GITHUB_ENV
          # Use a unique delimiter for safety
          COMMENT_DELIMITER="GH_BODY_DELIMITER_$(openssl rand -hex 8)"
          { echo "NEW_COMMENT_BODY<<$COMMENT_DELIMITER"; echo "${{ github.event.comment.body }}"; echo "$COMMENT_DELIMITER"; } >> "$GITHUB_ENV"
          # Determine if PR or Issue
          if [ -n '${{ github.event.issue.pull_request }}' ]; then
            IS_PR="true"
          else
            IS_PR="false"
          fi
          echo "IS_PR=$IS_PR" >> $GITHUB_OUTPUT
          # Define a unique, random delimiter for the main context block
          CONTEXT_DELIMITER="GH_CONTEXT_DELIMITER_$(openssl rand -hex 8)"
          # Fetch and Format Context based on type
          if [[ "$IS_PR" == "true" ]]; then
            # Fetch PR data
            pr_json=$(gh pr view ${{ env.THREAD_NUMBER }} --repo ${{ github.repository }} --json author,title,body,createdAt,state,headRefName,baseRefName,headRefOid,additions,deletions,commits,files,closingIssuesReferences,headRepository)
            # Debug: dump pr_json (discussion_data is dumped below) for inspection
            echo "$pr_json" > pr_json.txt
            # Fetch timeline data to find cross-references
            timeline_data=$(gh api "/repos/${{ github.repository }}/issues/${{ env.THREAD_NUMBER }}/timeline")
            repo_owner="${GITHUB_REPOSITORY%/*}"
            repo_name="${GITHUB_REPOSITORY#*/}"
            GRAPHQL_QUERY='query($owner:String!, $name:String!, $number:Int!, $commentLimit:Int!, $reviewLimit:Int!, $threadLimit:Int!, $threadCommentLimit:Int!) {
              repository(owner: $owner, name: $name) {
                pullRequest(number: $number) {
                  comments(last: $commentLimit) {
                    nodes {
                      databaseId
                      author { login }
                      body
                      createdAt
                      isMinimized
                      minimizedReason
                    }
                  }
                  reviews(last: $reviewLimit) {
                    nodes {
                      databaseId
                      author { login }
                      body
                      state
                      submittedAt
                      isMinimized
                      minimizedReason
                    }
                  }
                  reviewThreads(last: $threadLimit) {
                    nodes {
                      id
                      isResolved
                      isOutdated
                      comments(last: $threadCommentLimit) {
                        nodes {
                          databaseId
                          author { login }
                          body
                          createdAt
                          path
                          line
                          originalLine
                          diffHunk
                          isMinimized
                          minimizedReason
                          pullRequestReview {
                            databaseId
                            isMinimized
                            minimizedReason
                          }
                        }
                      }
                    }
                  }
                }
              }
            }'
            discussion_data=$(gh api graphql \
              -F owner="$repo_owner" \
              -F name="$repo_name" \
              -F number=${{ env.THREAD_NUMBER }} \
              -F commentLimit=${{ env.COMMENT_FETCH_LIMIT }} \
              -F reviewLimit=${{ env.REVIEW_FETCH_LIMIT }} \
              -F threadLimit=${{ env.REVIEW_THREAD_FETCH_LIMIT }} \
              -F threadCommentLimit=${{ env.THREAD_COMMENT_FETCH_LIMIT }} \
              -f query="$GRAPHQL_QUERY")
            echo "$discussion_data" > discussion_data.txt
            # For prompt context
            echo "PR_HEAD_SHA=$(echo "$pr_json" | jq -r .headRefOid)" >> $GITHUB_ENV
            echo "THREAD_AUTHOR=$(echo "$pr_json" | jq -r .author.login)" >> $GITHUB_ENV
            echo "BASE_BRANCH=$(echo "$pr_json" | jq -r .baseRefName)" >> $GITHUB_ENV
            # Prepare all variables from JSON
            author=$(echo "$pr_json" | jq -r .author.login)
            created_at=$(echo "$pr_json" | jq -r .createdAt)
            base_branch=$(echo "$pr_json" | jq -r .baseRefName)
            head_branch=$(echo "$pr_json" | jq -r .headRefName)
            state=$(echo "$pr_json" | jq -r .state)
            additions=$(echo "$pr_json" | jq -r .additions)
            deletions=$(echo "$pr_json" | jq -r .deletions)
            total_commits=$(echo "$pr_json" | jq -r '.commits | length')
            changed_files_count=$(echo "$pr_json" | jq -r '.files | length')
            title=$(echo "$pr_json" | jq -r .title)
            body=$(echo "$pr_json" | jq -r '.body // "(No description provided)"')
            # Prepare the changed-files list, interpolating additions and deletions with jq.
            # (An earlier pattern was missing a backslash before the deletions interpolation,
            # which left a literal '((.deletions))' in the output.)
            changed_files_list=$(echo "$pr_json" | jq -r '.files[] | "- \(.path) (MODIFIED) +\((.additions))/-\((.deletions))"')
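            # Each entry looks like (file name illustrative): "- src/app.py (MODIFIED) +12/-3"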
            # Prepare general PR comments (exclude ignored bots)
            comments=$(echo "$discussion_data" | jq -r --argjson ignored "$IGNORE_BOT_NAMES_JSON" '
              ((.data.repository.pullRequest.comments.nodes // [])
                | map(select((.isMinimized != true) and (((.author.login? // "unknown") as $login | $ignored | index($login)) | not))))
              | if length > 0 then
                  map("- " + (.author.login? // "unknown") + " at " + (.createdAt // "N/A") + ":\n" + ((.body // "") | tostring) + "\n")
                  | join("")
                else
                  "No general comments."
                end')
            # ===== ACCURATE FILTERING & COUNTING =====
            stats_json=$(echo "$discussion_data" | jq -r --argjson ignored "$IGNORE_BOT_NAMES_JSON" '
              # Define filter logic
              def is_valid_review:
                (((.author.login? // "unknown") as $login | $ignored | index($login)) | not)
                and (.isMinimized != true);
              def is_valid_comment:
                .isResolved != true
                and .isOutdated != true
                and (((.comments.nodes // []) | first | .isMinimized) != true)
                and ((((.comments.nodes // []) | first | .pullRequestReview.isMinimized) // false) != true);
              def is_valid_inline:
                .isMinimized != true
                and ((.pullRequestReview.isMinimized // false) != true)
                and (((.author.login? // "unknown") as $login | $ignored | index($login)) | not);
              # Calculate Reviews
              def raw_reviews: (.data.repository.pullRequest.reviews.nodes // []);
              def total_reviews: (raw_reviews | length);
              def included_reviews: ([raw_reviews[]? | select(is_valid_review)] | length);
              # Calculate Review Comments
              def raw_threads: (.data.repository.pullRequest.reviewThreads.nodes // []);
              def valid_threads: (raw_threads | map(select(is_valid_comment)));
              def all_valid_comments: (valid_threads | map(.comments.nodes // []) | flatten | map(select(is_valid_inline)));
              # "Total" review comments means comments in active (unresolved, non-outdated) threads
              def total_review_comments: (raw_threads | map(select(.isResolved != true and .isOutdated != true)) | map(.comments.nodes // []) | flatten | length);
              def included_review_comments: (all_valid_comments | length);
              {
                total_reviews: total_reviews,
                included_reviews: included_reviews,
                excluded_reviews: (total_reviews - included_reviews),
                total_review_comments: total_review_comments,
                included_review_comments: included_review_comments,
                excluded_comments: (total_review_comments - included_review_comments)
              }
            ')
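            # stats_json holds the six counters defined above, e.g. (illustrative values):
            #   { "total_reviews": 4, "included_reviews": 3, "excluded_reviews": 1,
            #     "total_review_comments": 10, "included_review_comments": 8, "excluded_comments": 2 }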
            # Export stats to env vars
            filtered_reviews=$(echo "$stats_json" | jq .included_reviews)
            excluded_reviews=$(echo "$stats_json" | jq .excluded_reviews)
            filtered_comments=$(echo "$stats_json" | jq .included_review_comments)
            excluded_comments=$(echo "$stats_json" | jq .excluded_comments)
            echo "✓ Filtered reviews: $filtered_reviews included, $excluded_reviews excluded (ignored bots/hidden)"
            echo "✓ Filtered review comments: $filtered_comments included, $excluded_comments excluded (outdated/hidden)"
            # Reviews Text
            review_filter_err=$(mktemp 2>/dev/null || echo "/tmp/review_filter_err.log")
            if reviews=$(echo "$discussion_data" | jq -r --argjson ignored "$IGNORE_BOT_NAMES_JSON" '
              if ((((.data.repository.pullRequest.reviews.nodes // []) | length) > 0)) then
                ((.data.repository.pullRequest.reviews.nodes // [])[]?
                  | select(
                      ((.author.login? // "unknown") as $login | $ignored | index($login) | not)
                      and (.isMinimized != true)
                    )
                  | "- " + (.author.login? // "unknown") + " at " + (.submittedAt // "N/A") + ":\n - Review body: " + (.body // "(No summary comment)") + "\n - State: " + (.state // "UNKNOWN") + "\n")
              else
                "No formal reviews."
              end' 2>"$review_filter_err"); then
              if [ -s "$review_filter_err" ]; then
                echo "::debug::jq stderr (reviews) emitted output:"
                cat "$review_filter_err"
              fi
            else
              echo "::warning::Review formatting failed, using unfiltered data"
              reviews="Error processing reviews."
              echo "FILTER_ERROR_REVIEWS=true" >> $GITHUB_ENV
            fi
            rm -f "$review_filter_err" || true
            # Review Comments Text
            review_comment_filter_err=$(mktemp 2>/dev/null || echo "/tmp/review_comment_filter_err.log")
            if review_comments=$(echo "$discussion_data" | jq -r --argjson ignored "$IGNORE_BOT_NAMES_JSON" '
              ((.data.repository.pullRequest.reviewThreads.nodes // [])
                | map(select(
                    .isResolved != true and .isOutdated != true
                    and (((.comments.nodes // []) | first | .isMinimized) != true)
                    and ((((.comments.nodes // []) | first | .pullRequestReview.isMinimized) // false) != true)
                  ))
                | map(.comments.nodes // [])
                | flatten
                | map(select((.isMinimized != true)
                    and ((.pullRequestReview.isMinimized // false) != true)
                    and (((.author.login? // "unknown") as $login | $ignored | index($login)) | not))))
              | if length > 0 then
                  map("- " + (.author.login? // "unknown") + " at " + (.createdAt // "N/A") + " (" + (.path // "Unknown file") + ":" + ((.line // .originalLine // "N/A") | tostring) + "):\n " + ((.body // "") | tostring) + "\n")
                  | join("")
                else
                  "No inline review comments."
                end' 2>"$review_comment_filter_err"); then
              if [ -s "$review_comment_filter_err" ]; then
                echo "::debug::jq stderr (review comments) emitted output:"
                cat "$review_comment_filter_err"
              fi
            else
              echo "::warning::Review comment formatting failed"
              review_comments="Error processing review comments."
              echo "FILTER_ERROR_COMMENTS=true" >> $GITHUB_ENV
            fi
            rm -f "$review_comment_filter_err" || true
            # Store filtering statistics
            echo "EXCLUDED_REVIEWS=$excluded_reviews" >> $GITHUB_ENV
            echo "EXCLUDED_COMMENTS=$excluded_comments" >> $GITHUB_ENV
            # Build filtering summary
            filter_summary="Context filtering applied: ${excluded_reviews:-0} reviews and ${excluded_comments:-0} review comments excluded from this context."
            if [ "${FILTER_ERROR_REVIEWS}" = "true" ] || [ "${FILTER_ERROR_COMMENTS}" = "true" ]; then
              filter_summary="$filter_summary"$'\n'"Warning: Some filtering operations encountered errors. Context may include items that should have been filtered."
            fi
            # Prepare linked issues robustly by fetching each one individually.
            linked_issues_content=""
            issue_numbers=$(echo "$pr_json" | jq -r '.closingIssuesReferences[].number')
            if [ -z "$issue_numbers" ]; then
              linked_issues="No issues are formally linked for closure by this PR."
            else
              for number in $issue_numbers; do
                # Fetch each issue's data separately. This is more reliable for cross-repo issues or permission nuances.
                issue_details_json=$(gh issue view "$number" --repo "${{ github.repository }}" --json title,body 2>/dev/null || echo "{}")
                issue_title=$(echo "$issue_details_json" | jq -r '.title // "Title not available"')
                issue_body=$(echo "$issue_details_json" | jq -r '.body // "Body not available"')
                linked_issues_content+=$(printf "<issue>\n <number>#%s</number>\n <title>%s</title>\n <body>\n%s\n</body>\n</issue>\n" "$number" "$issue_title" "$issue_body")
              done
              linked_issues=$linked_issues_content
            fi
            # Prepare cross-references from timeline data
            references=$(echo "$timeline_data" | jq -r '.[] | select(.event == "cross-referenced") | .source.issue | "- Mentioned in \(.html_url | if contains("/pull/") then "PR" else "Issue" end): #\(.number) - \(.title)"')
            if [ -z "$references" ]; then references="This PR has not been mentioned in other issues or PRs."; fi
            # Step 1: Write the header for the multi-line environment variable
            echo "THREAD_CONTEXT<<$CONTEXT_DELIMITER" >> "$GITHUB_ENV"
            # Step 2: Append the content line by line
            echo "Type: Pull Request" >> "$GITHUB_ENV"
            echo "PR Number: #${{ env.THREAD_NUMBER }}" >> "$GITHUB_ENV"
            echo "Title: $title" >> "$GITHUB_ENV"
            echo "Author: $author" >> "$GITHUB_ENV"
            echo "Created At: $created_at" >> "$GITHUB_ENV"
            echo "Base Branch (target): $base_branch" >> "$GITHUB_ENV"
            echo "Head Branch (source): $head_branch" >> "$GITHUB_ENV"
            echo "State: $state" >> "$GITHUB_ENV"
            echo "Additions: $additions" >> "$GITHUB_ENV"
            echo "Deletions: $deletions" >> "$GITHUB_ENV"
            echo "Total Commits: $total_commits" >> "$GITHUB_ENV"
            echo "Changed Files: $changed_files_count files" >> "$GITHUB_ENV"
            echo "<pull_request_body>" >> "$GITHUB_ENV"
            echo "$title" >> "$GITHUB_ENV"
            echo "---" >> "$GITHUB_ENV"
            echo "$body" >> "$GITHUB_ENV"
            echo "</pull_request_body>" >> "$GITHUB_ENV"
            echo "<pull_request_comments>" >> "$GITHUB_ENV"
            echo "$comments" >> "$GITHUB_ENV"
            echo "</pull_request_comments>" >> "$GITHUB_ENV"
            echo "<pull_request_reviews>" >> "$GITHUB_ENV"
            echo "$reviews" >> "$GITHUB_ENV"
            echo "</pull_request_reviews>" >> "$GITHUB_ENV"
            echo "<pull_request_review_comments>" >> "$GITHUB_ENV"
            echo "$review_comments" >> "$GITHUB_ENV"
            echo "</pull_request_review_comments>" >> "$GITHUB_ENV"
            echo "<pull_request_changed_files>" >> "$GITHUB_ENV"
            echo "$changed_files_list" >> "$GITHUB_ENV"
            echo "</pull_request_changed_files>" >> "$GITHUB_ENV"
            echo "<linked_issues>" >> "$GITHUB_ENV"
            echo "$linked_issues" >> "$GITHUB_ENV"
            echo "</linked_issues>" >> "$GITHUB_ENV"
            # Add cross-references and the filtering summary to the final context
            echo "<cross_references>" >> "$GITHUB_ENV"
            echo "$references" >> "$GITHUB_ENV"
            echo "</cross_references>" >> "$GITHUB_ENV"
            echo "<filtering_summary>" >> "$GITHUB_ENV"
            echo "$filter_summary" >> "$GITHUB_ENV"
            echo "</filtering_summary>" >> "$GITHUB_ENV"
            # Step 3: Write the closing delimiter
            echo "$CONTEXT_DELIMITER" >> "$GITHUB_ENV"
          else # It's an Issue
            issue_data=$(gh issue view ${{ env.THREAD_NUMBER }} --repo ${{ github.repository }} --json author,title,body,createdAt,state,comments)
            timeline_data=$(gh api "/repos/${{ github.repository }}/issues/${{ env.THREAD_NUMBER }}/timeline")
            echo "THREAD_AUTHOR=$(echo "$issue_data" | jq -r .author.login)" >> $GITHUB_ENV
            # Prepare metadata
            author=$(echo "$issue_data" | jq -r .author.login)
            created_at=$(echo "$issue_data" | jq -r .createdAt)
            state=$(echo "$issue_data" | jq -r .state)
            title=$(echo "$issue_data" | jq -r .title)
            body=$(echo "$issue_data" | jq -r '.body // "(No description provided)"')
            # Prepare comments (exclude ignored bots)
            comments=$(echo "$issue_data" | jq -r --argjson ignored "$IGNORE_BOT_NAMES_JSON" 'if (((.comments // []) | length) > 0) then ((.comments[]? | select((.author.login as $login | $ignored | index($login)) | not)) | "- " + (.author.login // "unknown") + " at " + (.createdAt // "N/A") + ":\n" + ((.body // "") | tostring) + "\n") else "No comments have been posted yet." end')
            # Prepare cross-references
            references=$(echo "$timeline_data" | jq -r '.[] | select(.event == "cross-referenced") | .source.issue | "- Mentioned in \(.html_url | if contains("/pull/") then "PR" else "Issue" end): #\(.number) - \(.title)"')
            if [ -z "$references" ]; then references="No other issues or PRs have mentioned this thread."; fi
            # Step 1: Write the header
            echo "THREAD_CONTEXT<<$CONTEXT_DELIMITER" >> "$GITHUB_ENV"
            # Step 2: Append the content line by line
            echo "Type: Issue" >> "$GITHUB_ENV"
            echo "Issue Number: #${{ env.THREAD_NUMBER }}" >> "$GITHUB_ENV"
            echo "Title: $title" >> "$GITHUB_ENV"
            echo "Author: $author" >> "$GITHUB_ENV"
            echo "Created At: $created_at" >> "$GITHUB_ENV"
            echo "State: $state" >> "$GITHUB_ENV"
            echo "<issue_body>" >> "$GITHUB_ENV"
            echo "$body" >> "$GITHUB_ENV"
            echo "</issue_body>" >> "$GITHUB_ENV"
            echo "<issue_comments>" >> "$GITHUB_ENV"
            echo "$comments" >> "$GITHUB_ENV"
            echo "</issue_comments>" >> "$GITHUB_ENV"
            echo "<cross_references>" >> "$GITHUB_ENV"
            echo "$references" >> "$GITHUB_ENV"
            echo "</cross_references>" >> "$GITHUB_ENV"
            # Step 3: Write the footer
            echo "$CONTEXT_DELIMITER" >> "$GITHUB_ENV"
          fi

      - name: Clear pending bot review
        if: steps.validate.outputs.should_proceed == 'true' && steps.context.outputs.IS_PR == 'true'
        env:
          GH_TOKEN: ${{ steps.setup.outputs.token }}
          BOT_NAMES_JSON: ${{ env.BOT_NAMES_JSON }}
        run: |
          pending_review_ids=$(gh api --paginate \
            "/repos/${GITHUB_REPOSITORY}/pulls/${{ env.THREAD_NUMBER }}/reviews" \
            | jq -r --argjson bots "$BOT_NAMES_JSON" '.[]? | select((.state // "") == "PENDING" and (((.user.login // "") as $login | $bots | index($login)))) | .id' \
            | sort -u)
          if [ -z "$pending_review_ids" ]; then
            echo "No pending bot reviews to clear."
            exit 0
          fi
          while IFS= read -r review_id; do
            [ -z "$review_id" ] && continue
            if gh api \
              --method DELETE \
              -H "Accept: application/vnd.github+json" \
              "/repos/${GITHUB_REPOSITORY}/pulls/${{ env.THREAD_NUMBER }}/reviews/$review_id"; then
              echo "Cleared pending review $review_id"
            else
              echo "::warning::Failed to clear pending review $review_id"
            fi
          done <<< "$pending_review_ids"

      - name: Determine Review Type and Last Reviewed SHA
        if: steps.validate.outputs.should_proceed == 'true' && steps.context.outputs.IS_PR == 'true'
        id: review_type
        env:
          GH_TOKEN: ${{ steps.setup.outputs.token }}
          BOT_NAMES_JSON: ${{ env.BOT_NAMES_JSON }}
        run: |
          pr_summary_payload=$(gh pr view ${{ env.THREAD_NUMBER }} --repo ${{ github.repository }} --json comments,reviews)
          detect_json=$(echo "$pr_summary_payload" | jq -c --argjson bots "$BOT_NAMES_JSON" '
            def ts(x): if (x//""=="") then null else x end;
            def items:
              [ (.comments[]? | select(.author.login as $a | $bots | index($a)) | {type:"comment", body:(.body//""), ts:(.updatedAt // .createdAt // "")} ),
                (.reviews[]? | select(.author.login as $a | $bots | index($a)) | {type:"review", body:(.body//""), ts:(.submittedAt // .updatedAt // .createdAt // "")} )
              ] | sort_by(.ts) | .;
            def has_phrase: (.body//"") | test("This review was generated by an AI assistant\\.?");
            def has_marker: (.body//"") | test("<!--\\s*last_reviewed_sha:[a-f0-9]{7,40}\\s*-->");
            { latest_phrase: (items | map(select(has_phrase)) | last // {}),
              latest_marker: (items | map(select(has_marker)) | last // {}) }
          ')
          latest_phrase_ts=$(echo "$detect_json" | jq -r '.latest_phrase.ts // ""')
          latest_marker_ts=$(echo "$detect_json" | jq -r '.latest_marker.ts // ""')
          latest_marker_body=$(echo "$detect_json" | jq -r '.latest_marker.body // ""')
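          # The SHA marker searched for above is an HTML comment embedded in earlier bot
          # replies, e.g. (SHA illustrative): <!-- last_reviewed_sha:0a1b2c3 -->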
echo "is_first_review=false" >> $GITHUB_OUTPUT
resolved_sha=""
if [ -z "$latest_phrase_ts" ] && [ -z "$latest_marker_ts" ]; then
echo "is_first_review=true" >> $GITHUB_OUTPUT
fi
if [ -n "$latest_marker_ts" ] && { [ -z "$latest_phrase_ts" ] || [ "$latest_marker_ts" \> "$latest_phrase_ts" ] || [ "$latest_marker_ts" = "$latest_phrase_ts" ]; }; then
resolved_sha=$(printf "%s" "$latest_marker_body" | sed -nE 's/.*<!--\s*last_reviewed_sha:([a-f0-9]{7,40})\s*-->.*/\1/p' | head -n1)
fi
if [ -z "$resolved_sha" ] && [ -n "$latest_phrase_ts" ]; then
reviews_json=$(gh api "/repos/${{ github.repository }}/pulls/${{ env.THREAD_NUMBER }}/reviews" || echo '[]')
resolved_sha=$(echo "$reviews_json" | jq -r --argjson bots "$BOT_NAMES_JSON" '[.[] | select((.user.login // "") as $u | $bots | index($u)) | .commit_id] | last // ""')
fi
if [ -n "$resolved_sha" ]; then
echo "last_reviewed_sha=$resolved_sha" >> $GITHUB_OUTPUT
echo "$resolved_sha" > last_review_sha.txt
else
echo "last_reviewed_sha=" >> $GITHUB_OUTPUT
echo "" > last_review_sha.txt
fi
- name: Save secure prompt from base branch
if: steps.validate.outputs.should_proceed == 'true'
run: cp .github/prompts/bot-reply.md /tmp/bot-reply.md
- name: Checkout PR head
if: steps.validate.outputs.should_proceed == 'true' && steps.context.outputs.IS_PR == 'true'
uses: actions/checkout@v4
with:
ref: ${{ env.PR_HEAD_SHA }}
token: ${{ steps.setup.outputs.token }}
fetch-depth: 0 # Full history needed for git operations and code analysis
- name: Generate PR Diffs (Full and Incremental)
if: steps.validate.outputs.should_proceed == 'true' && steps.context.outputs.IS_PR == 'true'
id: generate_diffs
env:
BASE_BRANCH: ${{ env.BASE_BRANCH }}
run: |
mkdir -p "$GITHUB_WORKSPACE/.mirrobot_files"
BASE_BRANCH="${BASE_BRANCH}"
CURRENT_SHA="${PR_HEAD_SHA}"
LAST_SHA="${{ steps.review_type.outputs.last_reviewed_sha }}"
# Always generate full diff against base branch
echo "Generating full PR diff against base branch: $BASE_BRANCH"
if git fetch origin "$BASE_BRANCH":refs/remotes/origin/"$BASE_BRANCH" 2>/dev/null; then
if MERGE_BASE=$(git merge-base origin/"$BASE_BRANCH" "$CURRENT_SHA" 2>/dev/null); then
if DIFF_CONTENT=$(git diff --patch "$MERGE_BASE".."$CURRENT_SHA" 2>/dev/null); then
DIFF_SIZE=${#DIFF_CONTENT}
if [ $DIFF_SIZE -gt 500000 ]; then
TRUNCATION_MSG=$'\n\n[DIFF TRUNCATED - PR is very large. Showing first 500KB only. Review scaled to high-impact areas.]'
DIFF_CONTENT="${DIFF_CONTENT:0:500000}${TRUNCATION_MSG}"
fi
echo "$DIFF_CONTENT" > "$GITHUB_WORKSPACE/.mirrobot_files/first_review_diff.txt"
echo "Full diff generated ($(echo "$DIFF_CONTENT" | wc -l) lines)"
else
echo "(Diff generation failed. Please refer to the changed files list above.)" > "$GITHUB_WORKSPACE/.mirrobot_files/first_review_diff.txt"
fi
else
echo "(No common ancestor found. This might be a new branch or orphaned commits.)" > "$GITHUB_WORKSPACE/.mirrobot_files/first_review_diff.txt"
fi
else
echo "(Base branch not available for diff. Please refer to the changed files list above.)" > "$GITHUB_WORKSPACE/.mirrobot_files/first_review_diff.txt"
fi
# Generate incremental diff if this is a follow-up review
if [ -n "$LAST_SHA" ]; then
echo "Generating incremental diff from $LAST_SHA to $CURRENT_SHA"
if git fetch origin $LAST_SHA 2>/dev/null || git cat-file -e $LAST_SHA^{commit} 2>/dev/null; then
if DIFF_CONTENT=$(git diff --patch $LAST_SHA..$CURRENT_SHA 2>/dev/null); then
DIFF_SIZE=${#DIFF_CONTENT}
if [ $DIFF_SIZE -gt 500000 ]; then
TRUNCATION_MSG=$'\n\n[DIFF TRUNCATED - Changes are very large. Showing first 500KB only.]'
DIFF_CONTENT="${DIFF_CONTENT:0:500000}${TRUNCATION_MSG}"
fi
echo "$DIFF_CONTENT" > "$GITHUB_WORKSPACE/.mirrobot_files/incremental_diff.txt"
echo "Incremental diff generated ($(echo "$DIFF_CONTENT" | wc -l) lines)"
else
echo "(Unable to generate incremental diff.)" > "$GITHUB_WORKSPACE/.mirrobot_files/incremental_diff.txt"
fi
else
echo "(Last reviewed SHA not accessible for incremental diff.)" > "$GITHUB_WORKSPACE/.mirrobot_files/incremental_diff.txt"
fi
else
echo "(No previous review - incremental diff not applicable.)" > "$GITHUB_WORKSPACE/.mirrobot_files/incremental_diff.txt"
fi
- name: Checkout repository (for issues)
if: steps.validate.outputs.should_proceed == 'true' && steps.context.outputs.IS_PR == 'false'
uses: actions/checkout@v4
with:
token: ${{ steps.setup.outputs.token }}
fetch-depth: 0 # Full history needed for git operations and code analysis
- name: Analyze comment and respond
if: steps.validate.outputs.should_proceed == 'true'
env:
GITHUB_TOKEN: ${{ steps.setup.outputs.token }}
THREAD_CONTEXT: ${{ env.THREAD_CONTEXT }}
NEW_COMMENT_AUTHOR: ${{ env.NEW_COMMENT_AUTHOR }}
NEW_COMMENT_BODY: ${{ env.NEW_COMMENT_BODY }}
THREAD_NUMBER: ${{ env.THREAD_NUMBER }}
GITHUB_REPOSITORY: ${{ github.repository }}
THREAD_AUTHOR: ${{ env.THREAD_AUTHOR }}
PR_HEAD_SHA: ${{ env.PR_HEAD_SHA }}
IS_FIRST_REVIEW: ${{ steps.review_type.outputs.is_first_review }}
OPENCODE_PERMISSION: |
{
"bash": {
"gh*": "allow",
"git*": "allow",
"jq*": "allow"
},
"external_directory": "allow",
"webfetch": "deny"
}
run: |
# Only substitute the variables we intend; leave example $vars and secrets intact
if [ "${{ steps.context.outputs.IS_PR }}" = "true" ]; then
FULL_DIFF_PATH="$GITHUB_WORKSPACE/.mirrobot_files/first_review_diff.txt"
INCREMENTAL_DIFF_PATH="$GITHUB_WORKSPACE/.mirrobot_files/incremental_diff.txt"
LAST_REVIEWED_SHA="${{ steps.review_type.outputs.last_reviewed_sha }}"
else
FULL_DIFF_PATH=""
INCREMENTAL_DIFF_PATH=""
LAST_REVIEWED_SHA=""
fi
VARS='$THREAD_CONTEXT $NEW_COMMENT_AUTHOR $NEW_COMMENT_BODY $THREAD_NUMBER $GITHUB_REPOSITORY $THREAD_AUTHOR $PR_HEAD_SHA $IS_FIRST_REVIEW $FULL_DIFF_PATH $INCREMENTAL_DIFF_PATH $LAST_REVIEWED_SHA'
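          # envsubst "$VARS" substitutes only the variables whitelisted above; any other
          # "$..." text in the prompt template (e.g. example shell snippets) is left as-is.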
FULL_DIFF_PATH="$FULL_DIFF_PATH" INCREMENTAL_DIFF_PATH="$INCREMENTAL_DIFF_PATH" LAST_REVIEWED_SHA="$LAST_REVIEWED_SHA" envsubst "$VARS" < /tmp/bot-reply.md | opencode run --share -