Skip to content

Commit 536fcad

Browse files
authored
Add e2e tests (#119)
* Add e2e tests * newline fixes * fix typos
1 parent d16d46a commit 536fcad

File tree

5 files changed

+59
-1
lines changed

5 files changed

+59
-1
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,3 +3,4 @@
33
node_modules
44
npm-debug.log
55
coverage
6+
test/e2e/results

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,8 @@
55
"readmeFilename": "README.md",
66
"main": "index.js",
77
"scripts": {
8-
"test": "istanbul cover node_modules/mocha/bin/_mocha --dir ./coverage --report lcov -- -R spec --recursive --timeout 7000 ./test && npm run eslint",
8+
"test": "istanbul cover node_modules/mocha/bin/_mocha --dir ./coverage --report lcov -- -R spec --recursive --timeout 7000 ./test/unit/ ./test/functional && npm run eslint",
9+
"test-e2e": "node_modules/mocha/bin/_mocha --timeout 60000 ./test/e2e/*-test.js",
910
"eslint": "eslint lib/** index.js"
1011
},
1112
"repository": {

test/e2e/e2e-test.js

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
// E2E smoke test: scrapes every URL listed in urls.json into
// test/e2e/results/<hostname> and checks that the scraper resolves
// with a truthy result. Network-dependent; run via `npm run test-e2e`.
var should = require('should');
var scraper = require('../../index');
var URL = require('url');
var fs = require('fs-extra');
var _ = require('lodash');

var urls = require('./urls.json');
var options = require('./options.json');

// Root directory for all scraped output; wiped before each run.
var outputRoot = __dirname + '/results';

describe('E2E', function() {
  before(function() {
    // Clean slate so leftovers from a previous run cannot collide
    // with (or mask) this run's output.
    fs.emptyDirSync(outputRoot);
  });

  after(function() {
    console.log('Scraping completed. Go to ' + outputRoot + ' to check results');
  });

  urls.forEach(function(pageUrl) {
    describe(pageUrl, function() {
      it('should be successfully scraped', function() {
        // Shallow clone of the shared base options — each site gets its
        // own top-level `directory` and `urls` without mutating the
        // options object shared by the other test cases.
        var siteOptions = _.clone(options);
        siteOptions.directory = outputRoot + '/' + URL.parse(pageUrl).hostname;
        siteOptions.urls = [ { url: pageUrl, filename: 'index.html' } ];
        // Return the promise so mocha waits for scrape completion
        // and reports rejections as test failures.
        return scraper.scrape(siteOptions).then(function(result) {
          result.should.be.ok();
        });
      });
    });
  });
});

test/e2e/options.json

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
{
2+
"request": {
3+
"headers": {
4+
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36"
5+
}
6+
}
7+
}

test/e2e/urls.json

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
[
2+
"https://nodejs.org/docs/latest/api/documentation.html",
3+
"https://www.npmjs.com/",
4+
"https://www.airbnb.com/",
5+
"http://www.ebay.com/",
6+
"http://www.booking.com/",
7+
"https://www.kinopoisk.ru/",
8+
"https://www.linkedin.com/",
9+
"http://www.apple.com/",
10+
"https://habrahabr.ru/post/310794/",
11+
"http://www.caminitodelrey.info/",
12+
"http://eslint.org/docs/rules/",
13+
"http://www.lostfilm.tv/",
14+
"https://www.antipenko.pp.ua/"
15+
]

0 commit comments

Comments
 (0)