
Commit 0b13551

Fix labels
1 parent 1df3139 commit 0b13551

4 files changed: +10 -12 lines

content/academy/anti_scraping/mitigation/using_proxies.md

Lines changed: 1 addition & 3 deletions
@@ -52,9 +52,7 @@ await crawler.addRequests([{
     url: 'https://demo-webstore.apify.org/search/on-sale',
     // By labeling the Request, we can very easily
     // identify it later in the requestHandler.
-    userData: {
-        label: 'START',
-    },
+    label: 'START',
 }]);
 
 await crawler.run();
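For context, this is the pattern the change converges on: the label sits at the top level of the request options instead of being nested inside userData, and the router matches handlers against it. A minimal sketch, assuming Crawlee's CheerioCrawler and createCheerioRouter; only the URL comes from the diff above, the rest is illustrative.

// Minimal sketch (assumed setup, not part of this commit).
import { CheerioCrawler, createCheerioRouter } from 'crawlee';

const router = createCheerioRouter();

// The router matches handlers against the request's label.
router.addHandler('START', async ({ request, log }) => {
    // The label is also exposed on the request itself.
    log.info(`Handling ${request.url} (label: ${request.label})`);
});

const crawler = new CheerioCrawler({ requestHandler: router });

await crawler.addRequests([{
    url: 'https://demo-webstore.apify.org/search/on-sale',
    // label now sits at the top level of the request options
    // rather than inside userData.
    label: 'START',
}]);

await crawler.run();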

content/academy/expert_scraping_with_apify/solutions/actor_building.md

Lines changed: 4 additions & 4 deletions
@@ -110,8 +110,8 @@ router.addHandler('START', async ({ $, crawler, request }) => {
     // to the crawler for its page
     await crawler.addRequests([{
         url,
+        label: labels.PRODUCT,
         userData: {
-            label: labels.PRODUCT,
             // Pass the scraped data about the product to the next
             // request so that it can be used there
             data: {
@@ -250,8 +250,8 @@ router.addHandler(labels.PRODUCT, async ({ $, crawler, request }) => {
     // Add to the request queue
     await crawler.addRequests([{
         url: OFFERS_URL(data.asin),
+        label: labels.OFFERS,
         userData: {
-            label: labels.OFFERS,
             data: {
                 ...data,
                 description: element.text().trim(),
@@ -339,8 +339,8 @@ router.addHandler(labels.START, async ({ $, crawler, request }) => {
 
     await crawler.addRequests([{
         url,
+        label: labels.PRODUCT,
         userData: {
-            label: labels.PRODUCT,
             data: {
                 title: titleElement.first().text().trim(),
                 asin: element.attr('data-asin'),
@@ -359,8 +359,8 @@ router.addHandler(labels.PRODUCT, async ({ $, crawler, request }) => {
 
     await crawler.addRequests([{
         url: OFFERS_URL(data.asin),
+        label: labels.OFFERS,
         userData: {
-            label: labels.OFFERS,
             data: {
                 ...data,
                 description: element.text().trim(),
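The remaining hunks in this commit all apply the same refactor, so the handler-side shape they converge on looks roughly like the sketch below. It reuses the file's own router, labels and OFFERS_URL; the description selector is hypothetical, standing in for the element the original code scrapes.

// Sketch of the relabelled PRODUCT handler pattern (selector is hypothetical).
router.addHandler(labels.PRODUCT, async ({ $, crawler, request }) => {
    // Scraped fields still travel between requests via userData.data;
    // only the label has moved to the top level of the request options.
    const { data } = request.userData;

    await crawler.addRequests([{
        url: OFFERS_URL(data.asin),
        label: labels.OFFERS,
        userData: {
            data: {
                ...data,
                description: $('#productDescription').text().trim(),
            },
        },
    }]);
});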

content/academy/expert_scraping_with_apify/solutions/handling_migrations.md

Lines changed: 2 additions & 2 deletions
@@ -71,8 +71,8 @@ router.addHandler(labels.START, async ({ $, crawler, request }) => {
 
     await crawler.addRequest([{
         url,
+        label: labels.PRODUCT,
         userData: {
-            label: labels.PRODUCT,
             data: {
                 title: titleElement.first().text().trim(),
                 asin: element.attr('data-asin'),
@@ -91,8 +91,8 @@ router.addHandler(labels.PRODUCT, async ({ $, crawler, request }) => {
 
     await crawler.addRequests([{
         url: OFFERS_URL(data.asin),
+        label: labels.OFFERS,
         userData: {
-            label: labels.OFFERS,
             data: {
                 ...data,
                 description: element.text().trim(),

content/academy/expert_scraping_with_apify/solutions/using_storage_creating_tasks.md

Lines changed: 3 additions & 3 deletions
@@ -135,8 +135,8 @@ const crawler = new Actor.CheerioCrawler({
 await crawler.addRequests([
     {
         url: `${BASE_URL}/s/ref=nb_sb_noss?url=search-alias%3Daps&field-keywords=${keyword}`,
+        label: 'START',
         userData: {
-            label: 'START',
             keyword,
         },
     },
@@ -182,8 +182,8 @@ router.addHandler(labels.START, async ({ $, crawler, request }) => {
 
     await crawler.addRequests([{
         url,
+        label: labels.PRODUCT,
         userData: {
-            label: labels.PRODUCT,
             data: {
                 title: titleElement.first().text().trim(),
                 asin: element.attr('data-asin'),
@@ -202,8 +202,8 @@ router.addHandler(labels.PRODUCT, async ({ $, crawler, request }) => {
 
     await crawler.addRequests([{
         url: OFFERS_URL(data.asin),
+        label: labels.OFFERS,
         userData: {
-            label: labels.OFFERS,
             data: {
                 ...data,
                 description: element.text().trim(),
