name: Test Doc Generator

on:
  workflow_dispatch:
    inputs:
      target_branch:
        description: 'The branch in appsmith-docs to checkout and create PR against'
        required: true
        default: 'docs-staging'  # Default to docs-staging
        type: string
jobs:
  generate_docs:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout appsmith-docs target branch
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.test_REPO_ACCESS_TOKEN }}
          ref: ${{ github.event.inputs.target_branch }}  # Checkout the specified branch
          # Fetch depth 0 to get all history needed for base branch detection by create-pull-request
          fetch-depth: 0
16- - name : Create exclusion list
17- run : echo > saas_exclusions.txt
18-
19- - name : Ensure scripts directory exists
25+ - name : Ensure scripts directory and tracking files exist
2026 run : |
2127 mkdir -p scripts
28+ # Initialize tracking files if they don't exist in the checked-out branch
2229 [ -f scripts/processed_files.txt ] || touch scripts/processed_files.txt
2330 [ -f scripts/file_hashes.json ] || echo "{}" > scripts/file_hashes.json
2431
2532 - name : Fetch file list from test repo
2633 id : fetch_files
2734 run : |
28- curl -s --max-time 30 -H "Authorization: Bearer ${{ secrets.test_REPO_ACCESS_TOKEN }}" \
35+ echo "Fetching files from source repo..."
36+ curl -s --max-time 60 -H "Authorization: Bearer ${{ secrets.test_REPO_ACCESS_TOKEN }}" \
2937 -H "Accept: application/vnd.github+json" \
3038 https://api.github.com/repos/harshilp24/integration-resources-test/contents/Generic%20UQI%20Creation/uqi_configs \
3139 -o response.json
3240
33- jq -r '.[] | select(.type=="file") | [.name, .sha] | @tsv' response.json > latest_files_with_sha.txt
34- jq -r '.[] | select(.type=="file") | .name' response.json > latest_files.txt
41+ if ! jq -e '.' response.json > /dev/null; then
42+ echo "Error: Invalid JSON received from GitHub API."
43+ cat response.json # Print response for debugging
44+ exit 1
45+ fi
3546
36- echo "files_found=true" >> $GITHUB_ENV
47+ # Check if the response is an array (list of files) or an object (error message)
48+ if jq -e 'type == "array"' response.json > /dev/null; then
49+ jq -r '.[] | select(.type=="file") | [.name, .sha] | @tsv' response.json > latest_files_with_sha.txt
50+ jq -r '.[] | select(.type=="file") | .name' response.json > latest_files.txt
51+ echo "files_found=true" >> $GITHUB_ENV
52+ echo "Files list fetched successfully."
53+ else
54+ echo "Warning: Received non-array response from GitHub API (maybe empty dir or error?):"
55+ cat response.json
56+ # Create empty files to avoid errors downstream if dir is empty
57+ touch latest_files_with_sha.txt
58+ touch latest_files.txt
59+ echo "files_found=false" >> $GITHUB_ENV # Indicate no files found
60+ fi
3761
3862 - name : Identify new and modified files
3963 id : detect_changes
64+ # Only run if files were actually found in the source repo
65+ if : env.files_found == 'true'
4066 run : |
67+ echo "Identifying changes against branch: ${{ github.event.inputs.target_branch }}"
68+ # Read tracking files FROM THE CHECKED-OUT BRANCH
4169 PREV_HASHES=$(cat scripts/file_hashes.json)
42- NEW_FILES=$(comm -23 <(sort latest_files.txt) <(sort scripts/processed_files.txt) || true)
43- MODIFIED_FILES=""
70+ # Ensure processed_files.txt exists before sorting
71+ [ -f scripts/processed_files.txt ] || touch scripts/processed_files.txt
72+
73+ # Find files present in latest_files.txt but not in processed_files.txt
74+ comm -23 <(sort latest_files.txt) <(sort scripts/processed_files.txt) > new_files.tmp || true
75+ echo "--- New Files ---"
76+ cat new_files.tmp
77+ echo "-----------------"
78+
79+ MODIFIED_FILES_LIST="modified_files.tmp"
80+ touch $MODIFIED_FILES_LIST
81+ echo "--- Checking for Modifications ---" >&2 # Debug output to stderr
4482 while IFS=$'\t' read -r FILE_NAME FILE_SHA; do
45- PREV_SHA=$(echo "$PREV_HASHES" | jq -r --arg file "$FILE_NAME" '.[$file] // ""')
46- if [ -n "$PREV_SHA" ] && [ "$PREV_SHA" != "$FILE_SHA" ] && grep -q "^$FILE_NAME$" scripts/processed_files.txt; then
47- MODIFIED_FILES="$MODIFIED_FILES$FILE_NAME"$'\n'
83+ # Check if the file is listed in processed_files.txt (meaning it's not new)
84+ if grep -q -x -F "$FILE_NAME" scripts/processed_files.txt; then
85+ PREV_SHA=$(echo "$PREV_HASHES" | jq -r --arg file "$FILE_NAME" '.["$file"] // ""')
86+ echo "Checking: $FILE_NAME, Current SHA: $FILE_SHA, Previous SHA: $PREV_SHA" >&2
87+ if [ -n "$PREV_SHA" ] && [ "$PREV_SHA" != "$FILE_SHA" ]; then
88+ echo "$FILE_NAME" >> $MODIFIED_FILES_LIST
89+ echo " -> Marked as modified." >&2
90+ fi
4891 fi
4992 done < latest_files_with_sha.txt
50- { echo "$NEW_FILES"; echo "$MODIFIED_FILES"; } | grep -v "^$" > files_to_process.txt
93+ echo "--- Modified Files ---"
94+ cat $MODIFIED_FILES_LIST
95+ echo "----------------------"
96+
97+ # Combine new and modified files, ensuring uniqueness and removing empty lines
98+ cat new_files.tmp $MODIFIED_FILES_LIST | sort | uniq | grep -v '^$' > files_to_process.txt || true
99+
100+ echo "--- Files to Process ---"
101+ cat files_to_process.txt
102+ echo "------------------------"
103+
51104 if [ -s files_to_process.txt ]; then
52105 echo "changes_found=true" >> $GITHUB_ENV
106+ echo "Changes detected."
53107 else
54108 echo "changes_found=false" >> $GITHUB_ENV
109+ echo "No new or modified files detected."
55110 fi
111+ # Clean up temporary files
112+ rm -f new_files.tmp modified_files.tmp
56113
57114 - name : Exit if no files to process
58115 if : env.changes_found != 'true'
59- run : exit 0
116+ run : |
117+ echo "No changes detected in source files relative to branch '${{ github.event.inputs.target_branch }}'. Exiting."
118+ exit 0
60119
61120 - name : Process files with OpenAI
121+ # This step now correctly reads the initial hashes from the checked-out branch
122+ # and updates the local files, which are then committed in the next step.
123+ if : env.changes_found == 'true'
62124 run : |
63125 mkdir -p generated_docs
126+ # Read initial hashes from the checked-out branch state
64127 HASHES_JSON=$(cat scripts/file_hashes.json)
65128 PROCESSED_COUNT=0
66129
67130 while IFS= read -r FILE_NAME; do
131+ # Ensure FILE_NAME is not empty
132+ if [ -z "$FILE_NAME" ]; then
133+ continue
134+ fi
135+
68136 echo "⏳ Processing $FILE_NAME"
69- FILE_URL="https://raw.githubusercontent.com/harshilp24/integration-resources-test/main/Generic%20UQI%20Creation/uqi_configs/$FILE_NAME"
70- curl -sSL --max-time 30 "$FILE_URL" -o input_file.json
137+ # URL encode the filename for the URL
138+ ENCODED_FILE_NAME=$(printf '%s' "$FILE_NAME" | jq -sRr @uri)
139+ FILE_URL="https://raw.githubusercontent.com/harshilp24/integration-resources-test/main/Generic%20UQI%20Creation/uqi_configs/$ENCODED_FILE_NAME"
140+ echo "Fetching content from: $FILE_URL"
141+ curl -fsSL --max-time 60 "$FILE_URL" -o input_file.json
142+ if [ $? -ne 0 ]; then
143+ echo "Error: Failed to download $FILE_NAME from $FILE_URL" >&2
144+ continue # Skip this file if download fails
145+ fi
71146
72- FILE_SHA=$(grep "$FILE_NAME" latest_files_with_sha.txt | cut -f2 )
73- HASHES_JSON =$(echo "$HASHES_JSON" | jq --arg file "$FILE_NAME" --arg sha "$FILE_SHA" '.[$file] = $sha' )
147+ # Find the SHA for the current file using awk (more robust than grep+cut )
148+ FILE_SHA =$(awk -F'\t' -v filename= "$FILE_NAME" '$1 == filename { print $2; exit }' latest_files_with_sha.txt )
74149
150+ if [ -z "$FILE_SHA" ]; then
151+ echo "Warning: Could not find SHA for '$FILE_NAME' in latest_files_with_sha.txt. Skipping hash update." >&2
152+ else
153+ echo "Updating hash for $FILE_NAME to $FILE_SHA"
154+ # Update the hash in our JSON object
155+ HASHES_JSON=$(echo "$HASHES_JSON" | jq --arg file "$FILE_NAME" --arg sha "$FILE_SHA" '.["$file"] = $sha')
156+ fi
157+
158+ # --- OpenAI Processing Start ---
75159 # Prompt 1: Extract Info
76160 SYSTEM_PROMPT=$(cat .github/prompts/extract_prompt.txt || echo "Extract important integration details.")
77161 USER_CONTENT=$(cat input_file.json)
@@ -89,14 +173,18 @@ jobs:
89173 temperature: 0
90174 }')
91175
92- RESPONSE =$(curl -s https://api.openai.com/v1/chat/completions \
93- -H "Authorization: Bearer ${{ secrets.test_OPENAI_API_KEY }}" \
176+ RESPONSE1 =$(curl -s https://api.openai.com/v1/chat/completions \
177+ -H "Authorization: Bearer ${{ secrets.OPENAI_API_KEY }}" \
94178 -H "Content-Type: application/json" \
95179 -d "$PAYLOAD")
96180
97- echo "$RESPONSE" | jq '.'
98-
99- echo "$RESPONSE" | jq -r '.choices[0].message.content' > extracted_info.md
181+ # Check for API errors
182+ if echo "$RESPONSE1" | jq -e '.error' > /dev/null; then
183+ echo "Error during OpenAI Prompt 1 for $FILE_NAME:" >&2
184+ echo "$RESPONSE1" | jq '.' >&2
185+ continue # Skip this file
186+ fi
187+ echo "$RESPONSE1" | jq -r '.choices[0].message.content' > extracted_info.md
100188
101189 # Prompt 2: Generate Markdown
102190 SYSTEM_PROMPT=$(cat .github/prompts/generate_prompt.txt || echo "Generate reference documentation in markdown.")
@@ -115,44 +203,85 @@ jobs:
115203 temperature: 0.3
116204 }')
117205
118- RESPONSE =$(curl -s https://api.openai.com/v1/chat/completions \
119- -H "Authorization: Bearer ${{ secrets.test_OPENAI_API_KEY }}" \
206+ RESPONSE2 =$(curl -s https://api.openai.com/v1/chat/completions \
207+ -H "Authorization: Bearer ${{ secrets.OPENAI_API_KEY }}" \
120208 -H "Content-Type: application/json" \
121209 -d "$PAYLOAD")
122210
123- echo "$RESPONSE" | jq '.'
124-
125- echo "$RESPONSE" | jq -r '.choices[0].message.content' > generated_doc.md
211+ # Check for API errors
212+ if echo "$RESPONSE2" | jq -e '.error' > /dev/null; then
213+ echo "Error during OpenAI Prompt 2 for $FILE_NAME:" >&2
214+ echo "$RESPONSE2" | jq '.' >&2
215+ continue # Skip this file
216+ fi
217+ echo "$RESPONSE2" | jq -r '.choices[0].message.content' > generated_doc.md
218+ # --- OpenAI Processing End ---
126219
220+ # Determine output path
127221 INTEGRATION=$(echo "$FILE_NAME" | sed 's/_uqi_config\.json//' | tr '[:upper:]' '[:lower:]')
128222 FINAL_PATH="website/docs/connect-data/reference/${INTEGRATION}.md"
129223
130224 mkdir -p "$(dirname "$FINAL_PATH")"
131225 cp generated_doc.md "$FINAL_PATH"
132- cp generated_doc.md "generated_docs/${INTEGRATION}.md"
226+ # Optional: Keep a copy in a separate dir if needed for artifacts
227+ # cp generated_doc.md "generated_docs/${INTEGRATION}.md"
133228
134- echo "$FILE_NAME" >> scripts/processed_files.txt
229+ # Add the successfully processed file to the list for this run
230+ echo "$FILE_NAME" >> processed_files_this_run.txt
135231 PROCESSED_COUNT=$((PROCESSED_COUNT + 1))
136- echo "✅ Finished $FILE_NAME"
232+ echo "✅ Finished processing $FILE_NAME"
233+
137234 done < files_to_process.txt
138235
139- echo "$HASHES_JSON" > scripts/file_hashes.json
236+ # Update the main tracking files with the results of this run
237+ # Append newly processed files to the persistent list
238+ if [ -f processed_files_this_run.txt ]; then
239+ cat processed_files_this_run.txt >> scripts/processed_files.txt
240+ # Ensure uniqueness and sort the persistent list
241+ sort -u scripts/processed_files.txt -o scripts/processed_files.txt
242+ rm processed_files_this_run.txt
243+ fi
244+ # Overwrite the persistent hash file with the updated JSON
245+ echo "$HASHES_JSON" | jq '.' > scripts/file_hashes.json
246+
140247 echo "processed_count=$PROCESSED_COUNT" >> $GITHUB_ENV
141- echo "content_generated=true" >> $GITHUB_ENV
248+ if [ "$PROCESSED_COUNT" -gt 0 ]; then
249+ echo "content_generated=true" >> $GITHUB_ENV
250+ else
251+ echo "content_generated=false" >> $GITHUB_ENV
252+ fi
253+ # Clean up intermediate files
254+ rm -f input_file.json extracted_info.md generated_doc.md
142255
143- - name : Commit and open PR
256+ - name : Commit and open PR against target branch
257+ # Only run if content was actually generated in the previous step
144258 if : env.content_generated == 'true'
145- uses : peter-evans/create-pull-request@v5
259+ uses : peter-evans/create-pull-request@v6 # Use v6 for latest features/fixes
146260 with :
147261 token : ${{ secrets.test_REPO_ACCESS_TOKEN }}
148- title : " test: generate integration docs from test repo"
149- commit-message : " test: generated docs from harshilp24/integration-resources-test"
150- branch : " test/docs-update-${{ github.run_id }}"
151- base : main
262+ # Make title and commit message specific to the target branch
263+ title : " docs: update integration docs for ${{ github.event.inputs.target_branch }}"
264+ commit-message : " docs: automated generation for ${{ github.event.inputs.target_branch }}\n\n Processed files based on changes in harshilp24/integration-resources-test."
265+ # Create a branch name that includes the target branch for clarity
266+ branch : " docs-update/${{ github.event.inputs.target_branch }}-${{ github.run_id }}"
267+ # Set the base branch for the PR to the target branch
268+ base : ${{ github.event.inputs.target_branch }}
269+ # Add the generated docs and the UPDATED tracking files
152270 add-paths : |
153271 website/docs/connect-data/reference/
154272 scripts/processed_files.txt
155273 scripts/file_hashes.json
274+ # Update PR body
156275 body : |
157- ✅ Test PR: Generated integration documentation from your test repo.
158- Source: [harshilp24/integration-resources-test](https://github.com/harshilp24/integration-resources-test/tree/main/Generic%20UQI%20Creation/uqi_configs)
276+ ✅ Automated PR: Generated/updated integration documentation based on changes in the source repository.
277+
278+ **Target Branch:** `${{ github.event.inputs.target_branch }}`
279+ **Source Repo:** [harshilp24/integration-resources-test](https://github.com/harshilp24/integration-resources-test/tree/main/Generic%20UQI%20Creation/uqi_configs)
280+
281+ This PR includes:
282+ - Updated markdown files in `website/docs/connect-data/reference/`
283+ - Updated tracking files in `scripts/` to reflect the processed state for this branch.
284+ # Optional: Add labels, assignees etc.
285+ # labels: automated-pr, documentation
286+ # assignees: your-github-username
287+
0 commit comments