diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index 2dd744f9..3633c263 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -17,14 +17,11 @@ jobs:
       - uses: actions/setup-node@v4
         with:
           node-version-file: '.nvmrc'
-      - name: Tailscale
-        uses: tailscale/github-action@v3
-        with:
-          authkey: ${{ secrets.TS_AUTHKEY }}
       - run: npm ci
+      - uses: wenoa/setup-wireguard@v1.0.0
+        with:
+          WG_CONFIG: ${{ secrets.WG_CONFIG }}
       - run: npm run build
-        env:
-          PROXY_URL: ${{ secrets.SOLVERR_PROXY_URL }}
       - run: npm test
       - run: git checkout -- package-lock.json #prevent package-lock.json-only feat changes
       - uses: stefanzweifel/git-auto-commit-action@v6
diff --git a/.github/workflows/static.yaml b/.github/workflows/static.yaml
index 0ed7b933..a9ef36fa 100644
--- a/.github/workflows/static.yaml
+++ b/.github/workflows/static.yaml
@@ -40,18 +40,15 @@ jobs:
       - uses: actions/setup-node@v4
         with:
           node-version-file: '.nvmrc'
-      - name: Tailscale
-        uses: tailscale/github-action@v3
-        with:
-          authkey: ${{ secrets.TS_AUTHKEY }}
       - uses: actions/cache/restore@v4
         id: restore-cache
         with:
           path: node_modules/
           key: ${{ runner.os }}-${{ github.run_id }}${{ github.run_number }}
+      - uses: wenoa/setup-wireguard@v1.0.0
+        with:
+          WG_CONFIG: ${{ secrets.WG_CONFIG }}
       - run: npm run build
-        env:
-          PROXY_URL: ${{ secrets.SOLVERR_PROXY_URL }}
       - uses: actions/cache/save@v4
         with:
           path: |
diff --git a/build/scraper.js b/build/scraper.js
index 762fd394..ca338e51 100644
--- a/build/scraper.js
+++ b/build/scraper.js
@@ -7,15 +7,6 @@
 import sleep from './sleep.js';
 import title from './title.js';
 const baseUrl = 'https://forums.warframe.com/forum/3-pc-update-notes/';
-const proxyUrl = process.env.PROXY_URL;
-const isCI = process.env.CI === 'true';
-const ciTimeout = process.env.CI_TIMEOUT ? parseInt(process.env.CI_TIMEOUT, 10) : 60000;
-const localTimeout = process.env.LOCAL_TIMEOUT ? parseInt(process.env.LOCAL_TIMEOUT, 10) : 12000000;
-
-if (!proxyUrl) {
-  console.error('PROXY_URL environment variable is not set.');
-  process.exit(1);
-}

 /**
  * Scraper to get patch logs from forums.
@@ -47,38 +38,13 @@ class Scraper {
     process.exit(1);
   }

-  async #fetch(url = baseUrl, session = 'fetch-warframe') {
-    try {
-      const res = await fetch(`${proxyUrl}/v1`, {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({
-          cmd: 'request.get',
-          url,
-          session,
-          maxTimeout: isCI ? ciTimeout : localTimeout,
-          returnOnlyCookies: false,
-          returnPageContent: true,
-        }),
-      });
-      const { solution } = await res.json();
-      if (!solution?.response) {
-        throw solution;
-      }
-      return solution.response;
-    } catch (error) {
-      console.error(`Failed to fetch from proxy ${url}:`, error);
-      throw error;
-    }
-  }
-
   /**
    * Retrieve number of post pages to look through. This value should be set to
    * 1 through the constructor if we only need the most recent changes.
    * @returns {Promise} set the total number of pages
    */
   async getPageNumbers() {
-    const html = await this.#fetch(undefined, 'get-page-numbers');
+    const html = await fetch(baseUrl).then((r) => r.text());
     const $ = load(html);

     const text = $('a[id^="elPagination"]').text().trim().split(' ');
@@ -96,7 +62,7 @@ class Scraper {
    * @returns {void}
    */
   async scrape(url) {
-    const html = await this.#fetch(url);
+    const html = await fetch(url).then((r) => r.text());
     const $ = load(html);
     const selector = $('ol[id^="elTable"] .ipsDataItem');
     const page /** @type {PatchData[]} */ = [];
@@ -191,7 +157,7 @@ class Scraper {
    * @returns {void}
    */
   async #scrapePost(url, data) {
-    const html = await this.#fetch(url);
+    const html = await fetch(url).then((r) => r.text());
     const $ = load(html);
     const article = $('article').first();
     const post = article.find('div[data-role="commentContent"]');
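
Note on the fetch simplification (a sketch, not part of the diff): the new inline calls `fetch(url).then((r) => r.text())` read the response body without checking the HTTP status, so a 5xx or challenge page would be parsed as normal forum HTML. A minimal hypothetical helper, here named fetchHtml, could centralize the three call sites and keep the error logging that the removed #fetch provided; it assumes the WireGuard tunnel configured in the workflows makes direct requests to the forum viable.

// Hypothetical helper (fetchHtml does not exist in the diff); assumes direct
// network access via the WireGuard tunnel set up in the workflows.
const fetchHtml = async (url = baseUrl) => {
  const res = await fetch(url);
  if (!res.ok) {
    // Keep failures visible in CI logs, as the removed #fetch did.
    console.error(`Failed to fetch ${url}: HTTP ${res.status}`);
    throw new Error(`HTTP ${res.status} for ${url}`);
  }
  return res.text();
};

Each call site, e.g. `const html = await fetch(url).then((r) => r.text());`, would then read `const html = await fetchHtml(url);`.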