@@ -21,7 +21,7 @@ To access the default dataset, we can use the [`Dataset`](https://crawlee.dev/a

```JavaScript
// dataset.js
- import { Dataset, } from 'crawlee';
+ import { Dataset, Configuration } from 'crawlee';

// Crawlee automatically deletes data from its previous runs.
// We can turn this off by setting 'purgeOnStart' to false.
@@ -43,7 +43,7 @@ Let's say we wanted to print the title for each product that is more expensive t

```JavaScript
// dataset.js
- import { Dataset } from 'crawlee';
+ import { Dataset, Configuration } from 'crawlee';

Configuration.getGlobalConfig().set('purgeOnStart', false);

@@ -82,7 +82,7 @@ Now that you have a token, you can upload your local dataset to the Apify platfo

```JavaScript
// dataset.js
- import { Dataset } from 'crawlee';
+ import { Dataset, Configuration } from 'crawlee';
import { ApifyClient } from 'apify-client';

Configuration.getGlobalConfig().set('purgeOnStart', false);
@@ -116,7 +116,7 @@ The full code, to do this in one go, looks like this:

```JavaScript
// dataset.js
- import { Dataset } from 'crawlee';
+ import { Dataset, Configuration } from 'crawlee';
import { ApifyClient } from 'apify-client';
import { writeFileSync } from 'fs';

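Each hunk makes the same fix: the snippets call `Configuration.getGlobalConfig().set('purgeOnStart', false)`, so `Configuration` has to be imported alongside `Dataset`. As a minimal sketch, the header of `dataset.js` after this change resolves to the following (the rest of the file is assumed unchanged):

```JavaScript
// dataset.js — imports as they stand after this change (sketch)
import { Dataset, Configuration } from 'crawlee';
import { ApifyClient } from 'apify-client';
import { writeFileSync } from 'fs';

// Keep data from previous runs instead of purging it on start,
// so the dataset can be read back and uploaded later.
Configuration.getGlobalConfig().set('purgeOnStart', false);
```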