Commit bf597a5

chore(workflow): adjusted workflow after review
- This change prevents automatic pushing to the SCP API during CI; the workflow should run on pull requests to allow for manual review first
- Remove the clone step, as it is unnecessary
- Resolves scp-data#5
1 parent 2fc999b commit bf597a5
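
For context, the workflow's trigger block after this change should read roughly as follows; only the pull_request section is new, and the nesting of the existing push trigger (branch filter plus path filter) is inferred from the diff context below:

on:
  push:
    branches:
      - main
    paths:
      - .github/workflows/scp-items.yml
  pull_request:
    branches:
      - main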

File tree: 1 file changed (+16, -27 lines)

.github/workflows/scp-items.yml

Lines changed: 16 additions & 27 deletions
@@ -9,6 +9,9 @@ on:
       - main
     paths:
       - .github/workflows/scp-items.yml
+  pull_request:
+    branches:
+      - main
 
 permissions:
   contents: write
@@ -17,76 +20,62 @@ jobs:
   update-main-scp:
     runs-on: ubuntu-latest
     steps:
-      - name: "Clone this Repository"
+      - name: "Checkout Crawler"
         uses: actions/checkout@v6
-        with:
-          path: scp-api
 
-      - name: "Clone the Crawler"
+      - name: "Clone API Repository"
         uses: actions/checkout@v6
         with:
-          repository: heroheman/scp_crawler
-          ref: "main"
-          path: scp-crawler
+          repository: heroheman/scp-api
+          path: scp-api
 
       - name: "Setup Python"
         uses: actions/setup-python@v6
         with:
           python-version: '3.13'
 
       - name: "Install Crawler"
-        working-directory: ./scp-crawler
         run: make install
 
       - name: "Crawl Titles"
-        working-directory: ./scp-crawler
         run: make data/scp_titles.json
 
       - name: "Crawl Hubs"
-        working-directory: ./scp-crawler
         run: make data/scp_hubs.json
 
       - name: "Crawl Items"
-        working-directory: ./scp-crawler
         run: make data/scp_items.json
 
       - name: "Process Items"
-        working-directory: ./scp-crawler
         run: make data/processed/items
 
       - name: "Crawl Tales"
-        working-directory: ./scp-crawler
         run: make data/scp_tales.json
 
       - name: "Process Tales"
-        working-directory: ./scp-crawler
         run: make data/processed/tales
 
       - name: "Crawl GOI"
-        working-directory: ./scp-crawler
         run: make data/goi.json
 
       - name: "Process GOI"
-        working-directory: ./scp-crawler
         run: make data/processed/goi
 
       - name: "Crawl Supplements"
-        working-directory: ./scp-crawler
         run: make data/scp_supplement.json
 
       - name: "Process Supplements"
-        working-directory: ./scp-crawler
         run: make data/processed/supplement
 
       - name: "Move Files into API"
-        run: cp -Rf ./scp-crawler/data/processed/* ./scp-api/docs/data/scp/
+        run: cp -Rf ./data/processed/* ./scp-api/docs/data/scp/
 
-      - name: "Push"
-        shell: bash
-        run: >
-          cd scp-api;
-          ./bin/push.sh;
+      # - name: "Push"
+      #   shell: bash
+      #   run: >
+      #     cd scp-api;
+      #     ./bin/push.sh;
 
-        env:
-          GIT_USER: "SCP Bot"
-          GIT_EMAIL: "[email protected]"
+      #   env:
+      #     GIT_USER: "SCP Bot"
+      #     GIT_EMAIL: "[email protected]"
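
With the push step commented out, a run of this workflow still installs the crawler, builds the data sets, and copies the processed output into ./scp-api/docs/data/scp/ inside the runner's workspace, but nothing is committed back to the API repository. If the step is restored after review, it could look roughly like the sketch below; the step body is taken from the commented-out block above, while the if: guard is only an assumption about one way to keep pull-request runs review-only:

      - name: "Push"
        # Assumption: skip publishing on pull_request runs; only push on direct pushes to main.
        if: github.event_name != 'pull_request'
        shell: bash
        run: >
          cd scp-api;
          ./bin/push.sh;
        env:
          GIT_USER: "SCP Bot"
          GIT_EMAIL: "[email protected]"  # obfuscated address as shown in the diff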
