Skip to content

Commit e2d1f32

Browse files
authored
ci: Fix execute-workflow action payload (#3227)
1 parent 9569d19 commit e2d1f32

File tree

20 files changed

+89
-79
lines changed

20 files changed

+89
-79
lines changed

.github/workflows/publish-to-npm.yml

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,19 +38,23 @@ jobs:
3838
ref: ${{ inputs.ref }}
3939
token: ${{ secrets.APIFY_SERVICE_ACCOUNT_GITHUB_TOKEN }}
4040
fetch-depth: 0
41+
4142
- name: Use Node.js 24
4243
uses: actions/setup-node@v6
4344
with:
4445
node-version: 24
4546
package-manager-cache: false
47+
4648
- name: Enable corepack
4749
run: |
4850
corepack enable
4951
corepack prepare yarn@stable --activate
52+
5053
- name: Activate cache for Node.js 24
5154
uses: actions/setup-node@v6
5255
with:
5356
cache: 'yarn'
57+
5458
- name: Turbo cache
5559
id: turbo-cache
5660
uses: actions/cache@v4
@@ -59,15 +63,34 @@ jobs:
5963
key: turbo-${{ github.job }}-${{ github.ref_name }}-${{ github.sha }}
6064
restore-keys: |
6165
turbo-${{ github.job }}-${{ github.ref_name }}-
66+
6267
- name: Install dependencies
6368
run: yarn
69+
70+
- name: Bump canary versions
71+
if: inputs.dist-tag == 'next'
72+
run: |
73+
yarn turbo copy --force -- --canary --preid=beta
74+
75+
- name: Commit changes
76+
if: inputs.dist-tag == 'next'
77+
uses: EndBug/add-and-commit@v9
78+
id: commit
79+
with:
80+
author_name: Apify Release Bot
81+
author_email: noreply@apify.com
82+
message: 'chore: bump canary versions [skip ci]'
83+
push: false
84+
6485
- name: Build packages
6586
run: yarn ci:build
87+
6688
- name: Publish to NPM (@latest)
6789
if: inputs.dist-tag == 'prod'
6890
run: yarn publish:prod --yes --no-verify-access
6991
env:
7092
GH_TOKEN: ${{ secrets.APIFY_SERVICE_ACCOUNT_GITHUB_TOKEN }}
93+
7194
- name: Publish to NPM (@next)
7295
if: inputs.dist-tag == 'next'
7396
run: yarn publish:next --yes --no-verify-access

.github/workflows/release.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -157,7 +157,7 @@ jobs:
157157
workflow: publish-to-npm.yml
158158
inputs: >
159159
{
160-
"ref": ${{ steps.commit.outputs.commit_long_sha || github.sha }},
160+
"ref": "${{ steps.commit.outputs.commit_long_sha || github.sha }}",
161161
"dist-tag": "prod"
162162
}
163163

.github/workflows/test-ci.yml

Lines changed: 1 addition & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -221,27 +221,14 @@ jobs:
221221
if: steps.changed-packages.outputs.changed_packages == '0'
222222
run: echo "Nothing to release"
223223

224-
- name: Bump canary versions
225-
if: steps.changed-packages.outputs.changed_packages != '0'
226-
run: |
227-
yarn turbo copy --force -- --canary --preid=beta
228-
229-
- name: Commit changes
230-
uses: EndBug/add-and-commit@v9
231-
id: commit
232-
with:
233-
author_name: Apify Release Bot
234-
author_email: noreply@apify.com
235-
message: 'chore: bump canary versions [skip ci]'
236-
237224
- name: Publish packages
238225
if: steps.changed-packages.outputs.changed_packages != '0'
239226
uses: apify/workflows/execute-workflow@main
240227
with:
241228
workflow: publish-to-npm.yml
242229
inputs: >
243230
{
244-
"ref": ${{ steps.commit.outputs.commit_long_sha || github.sha }},
231+
"ref": "${{ steps.commit.outputs.commit_long_sha || github.sha }}",
245232
"dist-tag": "next"
246233
}
247234

packages/basic-crawler/package.json

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@crawlee/basic",
3-
"version": "3.15.3-beta.8",
3+
"version": "3.15.2",
44
"description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.",
55
"engines": {
66
"node": ">=16.0.0"
@@ -48,9 +48,9 @@
4848
"@apify/log": "^2.4.0",
4949
"@apify/timeout": "^0.3.0",
5050
"@apify/utilities": "^2.7.10",
51-
"@crawlee/core": "3.15.3-beta.8",
52-
"@crawlee/types": "3.15.3-beta.8",
53-
"@crawlee/utils": "3.15.3-beta.8",
51+
"@crawlee/core": "3.15.2",
52+
"@crawlee/types": "3.15.2",
53+
"@crawlee/utils": "3.15.2",
5454
"csv-stringify": "^6.2.0",
5555
"fs-extra": "^11.0.0",
5656
"got-scraping": "^4.0.0",

packages/browser-crawler/package.json

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@crawlee/browser",
3-
"version": "3.15.3-beta.8",
3+
"version": "3.15.2",
44
"description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.",
55
"engines": {
66
"node": ">=16.0.0"
@@ -54,10 +54,10 @@
5454
},
5555
"dependencies": {
5656
"@apify/timeout": "^0.3.0",
57-
"@crawlee/basic": "3.15.3-beta.8",
58-
"@crawlee/browser-pool": "3.15.3-beta.8",
59-
"@crawlee/types": "3.15.3-beta.8",
60-
"@crawlee/utils": "3.15.3-beta.8",
57+
"@crawlee/basic": "3.15.2",
58+
"@crawlee/browser-pool": "3.15.2",
59+
"@crawlee/types": "3.15.2",
60+
"@crawlee/utils": "3.15.2",
6161
"ow": "^0.28.1",
6262
"tslib": "^2.4.0",
6363
"type-fest": "^4.0.0"

packages/browser-pool/package.json

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@crawlee/browser-pool",
3-
"version": "3.15.3-beta.8",
3+
"version": "3.15.2",
44
"description": "Rotate multiple browsers using popular automation libraries such as Playwright or Puppeteer.",
55
"engines": {
66
"node": ">=16.0.0"
@@ -38,8 +38,8 @@
3838
"dependencies": {
3939
"@apify/log": "^2.4.0",
4040
"@apify/timeout": "^0.3.0",
41-
"@crawlee/core": "3.15.3-beta.8",
42-
"@crawlee/types": "3.15.3-beta.8",
41+
"@crawlee/core": "3.15.2",
42+
"@crawlee/types": "3.15.2",
4343
"fingerprint-generator": "^2.1.68",
4444
"fingerprint-injector": "^2.1.68",
4545
"lodash.merge": "^4.6.2",

packages/cheerio-crawler/package.json

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@crawlee/cheerio",
3-
"version": "3.15.3-beta.8",
3+
"version": "3.15.2",
44
"description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.",
55
"engines": {
66
"node": ">=16.0.0"
@@ -53,9 +53,9 @@
5353
"access": "public"
5454
},
5555
"dependencies": {
56-
"@crawlee/http": "3.15.3-beta.8",
57-
"@crawlee/types": "3.15.3-beta.8",
58-
"@crawlee/utils": "3.15.3-beta.8",
56+
"@crawlee/http": "3.15.2",
57+
"@crawlee/types": "3.15.2",
58+
"@crawlee/utils": "3.15.2",
5959
"cheerio": "1.0.0-rc.12",
6060
"htmlparser2": "^9.0.0",
6161
"tslib": "^2.4.0"

packages/cli/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@crawlee/cli",
3-
"version": "3.15.3-beta.8",
3+
"version": "3.15.2",
44
"description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.",
55
"engines": {
66
"node": ">=16.0.0"
@@ -51,7 +51,7 @@
5151
"access": "public"
5252
},
5353
"dependencies": {
54-
"@crawlee/templates": "3.15.3-beta.8",
54+
"@crawlee/templates": "3.15.2",
5555
"ansi-colors": "^4.1.3",
5656
"fs-extra": "^11.0.0",
5757
"inquirer": "^8.2.4",

packages/core/package.json

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@crawlee/core",
3-
"version": "3.15.3-beta.8",
3+
"version": "3.15.2",
44
"description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.",
55
"engines": {
66
"node": ">=16.0.0"
@@ -59,9 +59,9 @@
5959
"@apify/pseudo_url": "^2.0.30",
6060
"@apify/timeout": "^0.3.0",
6161
"@apify/utilities": "^2.7.10",
62-
"@crawlee/memory-storage": "3.15.3-beta.8",
63-
"@crawlee/types": "3.15.3-beta.8",
64-
"@crawlee/utils": "3.15.3-beta.8",
62+
"@crawlee/memory-storage": "3.15.2",
63+
"@crawlee/types": "3.15.2",
64+
"@crawlee/utils": "3.15.2",
6565
"@sapphire/async-queue": "^1.5.1",
6666
"@vladfrangu/async_event_emitter": "^2.2.2",
6767
"csv-stringify": "^6.2.0",

packages/crawlee/package.json

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "crawlee",
3-
"version": "3.15.3-beta.8",
3+
"version": "3.15.2",
44
"description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.",
55
"engines": {
66
"node": ">=16.0.0"
@@ -54,18 +54,18 @@
5454
"access": "public"
5555
},
5656
"dependencies": {
57-
"@crawlee/basic": "3.15.3-beta.8",
58-
"@crawlee/browser": "3.15.3-beta.8",
59-
"@crawlee/browser-pool": "3.15.3-beta.8",
60-
"@crawlee/cheerio": "3.15.3-beta.8",
61-
"@crawlee/cli": "3.15.3-beta.8",
62-
"@crawlee/core": "3.15.3-beta.8",
63-
"@crawlee/http": "3.15.3-beta.8",
64-
"@crawlee/jsdom": "3.15.3-beta.8",
65-
"@crawlee/linkedom": "3.15.3-beta.8",
66-
"@crawlee/playwright": "3.15.3-beta.8",
67-
"@crawlee/puppeteer": "3.15.3-beta.8",
68-
"@crawlee/utils": "3.15.3-beta.8",
57+
"@crawlee/basic": "3.15.2",
58+
"@crawlee/browser": "3.15.2",
59+
"@crawlee/browser-pool": "3.15.2",
60+
"@crawlee/cheerio": "3.15.2",
61+
"@crawlee/cli": "3.15.2",
62+
"@crawlee/core": "3.15.2",
63+
"@crawlee/http": "3.15.2",
64+
"@crawlee/jsdom": "3.15.2",
65+
"@crawlee/linkedom": "3.15.2",
66+
"@crawlee/playwright": "3.15.2",
67+
"@crawlee/puppeteer": "3.15.2",
68+
"@crawlee/utils": "3.15.2",
6969
"import-local": "^3.1.0",
7070
"tslib": "^2.4.0"
7171
},

0 commit comments

Comments (0)