Skip to content

Commit c631e1c

Browse files
committed
[tests] add tests for cmdline
1 parent bf46bcf commit c631e1c

File tree

4 files changed

+77
-19
lines changed

4 files changed

+77
-19
lines changed

tests/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,3 +3,4 @@
33

44
TESTS_PATH = os.path.realpath(os.path.dirname(__file__))
55
PROJECT_PATH = os.path.realpath(os.path.join(TESTS_PATH, '..'))
6+
SAMPLE_DATA = os.path.join(TESTS_PATH, 'sample_data')

tests/servers.py

Lines changed: 3 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,10 @@
1212
import port_for
1313

1414
from scrapyrt.utils import is_python2
15-
from . import TESTS_PATH
16-
from .utils import get_testenv
15+
from . import SAMPLE_DATA
16+
from .utils import get_testenv, generate_project
1717

1818
DEVNULL = open(os.devnull, 'wb')
19-
SAMPLE_DATA = os.path.join(TESTS_PATH, 'sample_data')
2019

2120

2221
class BaseTestServer(object):
@@ -116,20 +115,7 @@ def __init__(self, site=None, *args, **kwargs):
116115
self.stderr = PIPE
117116
self.tmp_dir = tempfile.mkdtemp()
118117
self.cwd = os.path.join(self.tmp_dir, 'testproject')
119-
120-
source = os.path.join(SAMPLE_DATA, 'testproject')
121-
shutil.copytree(
122-
source, self.cwd, ignore=shutil.ignore_patterns('*.pyc'))
123-
# Pass site url to spider doing start requests
124-
spider_name = "testspider_startrequests.py"
125-
spider_filename = os.path.join(self.cwd, "testproject", "spider_templates", spider_name)
126-
spider_target_place = os.path.join(self.cwd, "testproject", "spiders", spider_name)
127-
if not site:
128-
return
129-
with open(spider_filename) as spider_file:
130-
spider_string = spider_file.read().format(site.url("page1.html"), site.url("page2.html"))
131-
with open(spider_target_place, "wb") as file_target:
132-
file_target.write(spider_string.encode('utf8'))
118+
generate_project(self.cwd, site=site)
133119

134120
def stop(self):
135121
super(ScrapyrtTestServer, self).stop()

tests/test_cmdline.py

Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
import pytest
2+
import sys
3+
import tempfile
4+
from collections import namedtuple
5+
from os import path, chdir
6+
from scrapy.utils.conf import closest_scrapy_cfg
7+
from twisted.python.components import Componentized
8+
9+
from scrapyrt.cmdline import find_scrapy_project, get_application
10+
from tests.utils import generate_project
11+
12+
13+
def make_fake_args():
    """Build a stand-in for scrapyrt's parsed command-line arguments.

    Returns a ``namedtuple`` named ``arguments`` carrying the values the
    cmdline tests expect: port 9080, ip '0.0.0.0', no ``-s`` overrides,
    the 'default' project and an empty settings module string.
    """
    ArgStub = namedtuple(
        'arguments', ['port', 'ip', 'set', 'project', 'settings'])
    return ArgStub(
        port=9080,
        ip='0.0.0.0',
        set=[],
        project='default',
        settings='',
    )
28+
29+
30+
@pytest.fixture
def workdir():
    """Create a sample scrapy project in a temp dir and chdir into it.

    Yields the project directory path. Tears down by restoring the
    previous working directory and deleting the temporary tree — the
    original version leaked both the tempdir and the cwd change into
    every subsequent test.
    """
    # Local imports keep this fix self-contained; the module only pulls
    # `path` and `chdir` from os at the top.
    import os
    import shutil
    tmp_dir = tempfile.mkdtemp()
    workdir = path.join(tmp_dir, 'testproject')
    generate_project(workdir)
    previous_cwd = os.getcwd()
    chdir(workdir)
    yield workdir
    chdir(previous_cwd)
    shutil.rmtree(tmp_dir, ignore_errors=True)
37+
38+
39+
class TestCmdLine(object):
    """Tests for the scrapyrt.cmdline helpers."""

    def test_find_scrapy_project(self, workdir):
        # Resolving the 'default' project should yield its settings module
        # and make the project directory importable.
        settings_module = find_scrapy_project('default')
        assert settings_module == 'testproject.settings'
        assert workdir in sys.path

    def test_find_scrapy_project_invalid_conf(self, workdir):
        # Overwrite scrapy.cfg so it no longer has a [settings] section.
        config = closest_scrapy_cfg()
        with open(config, 'wb') as f:
            f.write(b'[other_section]')
        with pytest.raises(RuntimeError) as err:
            find_scrapy_project('default')
        assert "No section: 'settings'" == str(err.value)

    def test_get_application(self):
        app = get_application(make_fake_args())
        assert isinstance(app, Componentized)

tests/utils.py

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
# -*- coding: utf-8 -*-
22
import os
3-
import socket
43

4+
import shutil
55
from scrapy.settings import Settings
66

7-
from . import TESTS_PATH
7+
from . import TESTS_PATH, SAMPLE_DATA
88

99
LOCALHOST = 'localhost'
1010

@@ -30,3 +30,19 @@ def get_settings():
3030
'scrapy.extensions.telnet.TelnetConsole': None,
3131
}
3232
})
33+
34+
35+
def generate_project(directory, site=None):
    """Copy the sample 'testproject' scrapy project into *directory*.

    When *site* is given, the start-requests spider template is rendered
    with the site's page urls and installed into the project's spiders
    package; otherwise only the bare project tree is copied.
    """
    template_root = os.path.join(SAMPLE_DATA, 'testproject')
    shutil.copytree(
        template_root, directory, ignore=shutil.ignore_patterns('*.pyc'))
    if not site:
        return
    # Pass site urls to the spider doing start requests.
    spider_name = "testspider_startrequests.py"
    template_path = os.path.join(
        directory, "testproject", "spider_templates", spider_name)
    target_path = os.path.join(
        directory, "testproject", "spiders", spider_name)
    with open(template_path) as template_file:
        rendered = template_file.read().format(
            site.url("page1.html"), site.url("page2.html"))
    with open(target_path, "wb") as target_file:
        target_file.write(rendered.encode('utf8'))

0 commit comments

Comments
 (0)