
Commit 23e76c0

Export function instead of object (#174)
1 parent 6437ef9 commit 23e76c0
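
The change is purely in how the package is exported: the module itself becomes the `scrape` function rather than an object with a `scrape` method, so existing callers drop the `.scrape` property access. A minimal before/after sketch of the public API, using the placeholder URLs and paths from the README in this commit:

```javascript
var scrape = require('website-scraper');

var options = {
  urls: ['http://nodejs.org/'],  // placeholder URL from the README example
  directory: '/path/to/save/'    // placeholder output directory
};

// Before this commit the entry point was a method on the exported object:
//   require('website-scraper').scrape(options, callback);
// After this commit the export is the function itself:
scrape(options, function (error, result) {
  /* handle result or error */
});

// The promise form changes the same way:
scrape(options).then(function (result) {
  /* handle result */
});
```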

File tree

11 files changed: +32 -35 lines changed


README.md

Lines changed: 11 additions & 11 deletions
@@ -22,19 +22,19 @@ npm install website-scraper
 
 ## Usage
 ```javascript
-var scraper = require('website-scraper');
+var scrape = require('website-scraper');
 var options = {
   urls: ['http://nodejs.org/'],
   directory: '/path/to/save/',
 };
 
 // with callback
-scraper.scrape(options, function (error, result) {
+scrape(options, function (error, result) {
   /* some code here */
 });
 
 // or with promise
-scraper.scrape(options).then(function (result) {
+scrape(options).then(function (result) {
   /* some code here */
 });
 ```
@@ -98,8 +98,8 @@ and separate files into directories:
 - `css` for .css (full path `/path/to/save/css`)
 
 ```javascript
-var scraper = require('website-scraper');
-scraper.scrape({
+var scrape = require('website-scraper');
+scrape({
   urls: [
     'http://nodejs.org/', // Will be saved with default filename 'index.html'
     {url: 'http://nodejs.org/about', filename: 'about.html'},
@@ -132,8 +132,8 @@ scraper.scrape({
 ```javascript
 // Links from example.com will be followed
 // Links from links will be ignored because theirs depth = 2 is greater than maxDepth
-var scraper = require('website-scraper');
-scraper.scrape({
+var scrape = require('website-scraper');
+scrape({
   urls: ['http://example.com/'],
   directory: '/path/to/save',
   recursive: true,
@@ -144,8 +144,8 @@ scraper.scrape({
 #### Example 3. Filtering out external resources
 ```javascript
 // Links to other websites are filtered out by the urlFilter
-var scraper = require('website-scraper');
-scraper.scrape({
+var scrape = require('website-scraper');
+scrape({
   urls: ['http://example.com/'],
   urlFilter: function(url){
     return url.indexOf('http://example.com') === 0;
@@ -159,8 +159,8 @@ scraper.scrape({
 // Downloads all the crawlable files of example.com.
 // The files are saved in the same structure as the structure of the website, by using the `bySiteStructure` filenameGenerator.
 // Links to other websites are filtered out by the urlFilter
-var scraper = require('website-scraper');
-scraper.scrape({
+var scrape = require('website-scraper');
+scrape({
   urls: ['http://example.com/'],
   urlFilter: function(url){
     return url.indexOf('http://example.com') === 0;

index.js

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 var Scraper = require('./lib/scraper.js');
 
-module.exports.scrape = function scrape (options, callback) {
+module.exports = function scrape (options, callback) {
   return new Scraper(options).scrape(callback);
 };
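
For context, the exported function is only a thin wrapper: options go to the `Scraper` constructor and `scrape()` does the work. A rough sketch of the equivalence, assuming the constructor-plus-`scrape()` shape shown in this diff and in the redirect test below (the URL and directory are placeholders):

```javascript
var Scraper = require('./lib/scraper.js'); // internal class, as required by index.js
var scrape = require('./index');           // public function export after this commit

var options = { urls: ['http://example.com/'], directory: '/path/to/save' }; // placeholders

// Public entry point: the module export itself.
scrape(options).then(function (result) { /* ... */ });

// Roughly equivalent internal path (as the redirect test uses it):
// options are passed to the constructor, and scrape() takes no arguments there.
new Scraper(options).scrape().then(function (result) { /* ... */ });
```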

test/e2e/e2e-test.js

Lines changed: 3 additions & 3 deletions
@@ -1,5 +1,5 @@
 var should = require('should');
-var scraper = require('../../index');
+var scrape = require('../../index');
 var URL = require('url');
 var fs = require('fs-extra');
 var _ = require('lodash');
@@ -26,7 +26,7 @@ describe('E2E', function() {
   scraperOptions.directory = resultDirname + '/' + hostname + '-byType';
   scraperOptions.urls = [ { url: url, filename: 'index.html' } ];
   scraperOptions.filenameGenerator = 'byType';
-  return scraper.scrape(scraperOptions).then(function(result) {
+  return scrape(scraperOptions).then(function(result) {
     result.should.be.ok();
   });
 });
@@ -37,7 +37,7 @@ describe('E2E', function() {
   scraperOptions.directory = resultDirname + '/' + hostname + '-bySiteStructure';
   scraperOptions.urls = [ { url: url } ];
   scraperOptions.filenameGenerator = 'bySiteStructure';
-  return scraper.scrape(scraperOptions).then(function(result) {
+  return scrape(scraperOptions).then(function(result) {
     result.should.be.ok();
   });
 });

test/functional/base/base.test.js

Lines changed: 2 additions & 2 deletions
@@ -2,7 +2,7 @@ require('should');
 var nock = require('nock');
 var fs = require('fs-extra');
 var cheerio = require('cheerio');
-var scraper = require('../../../index');
+var scrape = require('../../../index');
 var Resource = require('../../../lib/resource');
 
 var testDirname = __dirname + '/.tmp';
@@ -65,7 +65,7 @@ describe('Functional base', function() {
   // mocks for blog.html
   nock('http://blog.example.com/').get('/files/fail-1.png').replyWithError('something awful happened');
 
-  return scraper.scrape(options).then(function(result) {
+  return scrape(options).then(function(result) {
     // should return right result
     result.should.be.instanceOf(Array).and.have.length(3);

test/functional/circular-dependencies/circular-dependencies.test.js

Lines changed: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
 require('should');
 var nock = require('nock');
 var fs = require('fs-extra');
-var scraper = require('../../../index');
+var scrape = require('../../../index');
 
 var testDirname = __dirname + '/.tmp';
 var mockDirname = __dirname + '/mocks';
@@ -38,7 +38,7 @@ describe('Functional circular dependencies', function() {
   nock('http://example.com/').get('/style.css').replyWithFile(200, mockDirname + '/style.css');
   nock('http://example.com/').get('/style2.css').replyWithFile(200, mockDirname + '/style2.css');
 
-  return scraper.scrape(options).then(function() {
+  return scrape(options).then(function() {
     fs.existsSync(testDirname + '/index.html').should.be.eql(true);
     fs.existsSync(testDirname + '/about.html').should.be.eql(true);
     fs.existsSync(testDirname + '/style.css').should.be.eql(true);

test/functional/css-handling/css-handling.test.js

Lines changed: 2 additions & 3 deletions
@@ -1,7 +1,7 @@
 var should = require('should');
 var nock = require('nock');
 var fs = require('fs-extra');
-var Scraper = require('../../../lib/scraper');
+var scrape = require('../../../index');
 
 var testDirname = __dirname + '/.tmp';
 var mockDirname = __dirname + '/mocks';
@@ -38,9 +38,8 @@ describe('Functional: css handling', function() {
     { directory: 'local', extensions: ['.png', '.css'] }
   ]
 };
-var scraper = new Scraper(options);
 
-return scraper.scrape(options).then(function() {
+return scrape(options).then(function() {
   fs.existsSync(testDirname + '/index.html').should.be.eql(true);
   fs.existsSync(testDirname + '/local/style.css').should.be.eql(true);
   fs.existsSync(testDirname + '/local/style-import-1.css').should.be.eql(true);

test/functional/html-entities-in-url/html-entities-in-url.test.js

Lines changed: 2 additions & 3 deletions
@@ -1,7 +1,7 @@
 var should = require('should');
 var nock = require('nock');
 var fs = require('fs-extra');
-var Scraper = require('../../../lib/scraper');
+var scrape = require('../../../index');
 
 var testDirname = __dirname + '/.tmp';
 var mockDirname = __dirname + '/mocks';
@@ -47,9 +47,8 @@ describe('Functional: html entities in url', function() {
   ],
   ignoreErrors: false
 };
-var scraper = new Scraper(options);
 
-return scraper.scrape(options).then(function() {
+return scrape(options).then(function() {
   fs.existsSync(testDirname + '/index.html').should.be.eql(true);
   var indexHtml = fs.readFileSync(testDirname + '/index.html').toString();

test/functional/html-id-href/html-id-href.test.js

Lines changed: 2 additions & 3 deletions
@@ -1,7 +1,7 @@
 var should = require('should');
 var nock = require('nock');
 var fs = require('fs-extra');
-var Scraper = require('../../../lib/scraper');
+var scrape = require('../../../index');
 
 var testDirname = __dirname + '/.tmp';
 var mockDirname = __dirname + '/mocks';
@@ -39,9 +39,8 @@ describe('Functional html id href', function() {
     { directory: 'local', extensions: ['.png', '.svg'] }
   ]
 };
-var scraper = new Scraper(options);
 
-return scraper.scrape(options).then(function() {
+return scrape(options).then(function() {
   fs.existsSync(testDirname + '/index.html').should.be.eql(true);
   fs.existsSync(testDirname + '/other.html').should.be.eql(true);
   fs.existsSync(testDirname + '/local/sprite.svg').should.be.eql(true);

test/functional/recursive/recursive.test.js

Lines changed: 4 additions & 4 deletions
@@ -1,7 +1,7 @@
 require('should');
 var nock = require('nock');
 var fs = require('fs-extra');
-var scraper = require('../../../index');
+var scrape = require('../../../index');
 
 var testDirname = __dirname + '/.tmp';
 var mockDirname = __dirname + '/mocks';
@@ -36,7 +36,7 @@ describe('Functional recursive downloading', function() {
   nock('http://example.com/').get('/link2.html').reply(200, 'content 2');
   nock('http://example.com/').get('/link3.html').reply(200, 'content 3');
 
-  return scraper.scrape(options).then(function() {
+  return scrape(options).then(function() {
     fs.existsSync(testDirname + '/index.html').should.be.eql(true);
 
     // index.html anchors loaded
@@ -73,7 +73,7 @@ describe('Functional recursive downloading', function() {
   nock('http://example.com/').get('/link1-1.html').reply(200, 'content 1-1');
   nock('http://example.com/').get('/link1-2.html').reply(200, 'content 1-2');
 
-  return scraper.scrape(options).then(function() {
+  return scrape(options).then(function() {
     fs.existsSync(testDirname + '/index.html').should.be.eql(true);
 
     // index.html anchors loaded (depth 1)
@@ -106,7 +106,7 @@ describe('Functional recursive downloading', function() {
   nock('http://example.com/').get('/link2.html').reply(200, 'content 2');
   nock('http://example.com/').get('/link3.html').reply(200, 'content 3');
 
-  return scraper.scrape(options).then(function() {
+  return scrape(options).then(function() {
     fs.existsSync(testDirname + '/index.html').should.be.eql(true);
 
     // index.html anchors loaded

test/functional/redirect/redirect.test.js

Lines changed: 1 addition & 1 deletion
@@ -43,7 +43,7 @@ describe('Functional redirects', function() {
   var scraper = new Scraper(options);
   var loadToFsSpy = sinon.spy(scraper.fsAdapter, 'saveResource');
 
-  return scraper.scrape(options).then(function() {
+  return scraper.scrape().then(function() {
     loadToFsSpy.callCount.should.be.eql(2);
     loadToFsSpy.args[0][0].filename.should.be.eql('index.html');
     loadToFsSpy.args[1][0].filename.should.be.eql('true-page.html');
