diff --git a/scrapy_selenium/middlewares.py b/scrapy_selenium/middlewares.py
index 3761ca5..7b70592 100644
--- a/scrapy_selenium/middlewares.py
+++ b/scrapy_selenium/middlewares.py
@@ -2,10 +2,11 @@
 
 from importlib import import_module
 
+from selenium.webdriver.support.ui import WebDriverWait
+
 from scrapy import signals
 from scrapy.exceptions import NotConfigured
 from scrapy.http import HtmlResponse
-from selenium.webdriver.support.ui import WebDriverWait
 
 from .http import SeleniumRequest
 
@@ -14,7 +15,7 @@ class SeleniumMiddleware:
     """Scrapy middleware handling the requests using selenium"""
 
     def __init__(self, driver_name, driver_executable_path, driver_arguments,
-        browser_executable_path):
+                 browser_executable_path):
         """Initialize the selenium webdriver
 
         Parameters
@@ -55,8 +56,10 @@ def from_crawler(cls, crawler):
         """Initialize the middleware with the crawler settings"""
 
         driver_name = crawler.settings.get('SELENIUM_DRIVER_NAME')
-        driver_executable_path = crawler.settings.get('SELENIUM_DRIVER_EXECUTABLE_PATH')
-        browser_executable_path = crawler.settings.get('SELENIUM_BROWSER_EXECUTABLE_PATH')
+        driver_executable_path = crawler.settings.get(
+            'SELENIUM_DRIVER_EXECUTABLE_PATH')
+        browser_executable_path = crawler.settings.get(
+            'SELENIUM_BROWSER_EXECUTABLE_PATH')
         driver_arguments = crawler.settings.get('SELENIUM_DRIVER_ARGUMENTS')
 
         if not driver_name or not driver_executable_path:
@@ -71,7 +74,8 @@ def from_crawler(cls, crawler):
             browser_executable_path=browser_executable_path
         )
 
-        crawler.signals.connect(middleware.spider_closed, signals.spider_closed)
+        crawler.signals.connect(
+            middleware.spider_closed, signals.spider_closed)
 
         return middleware
 
@@ -118,4 +122,3 @@ def spider_closed(self):
         """Shutdown the driver when spider is closed"""
 
         self.driver.quit()
-
diff --git a/setup.py b/setup.py
index 16fd185..ae8aa7f 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,7 @@
 """This module contains the packaging routine for the pybook package"""
 
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup
+
 try:
     from pip.download import PipSession
     from pip.req import parse_requirements
@@ -30,5 +31,3 @@ def get_requirements(source):
     packages=find_packages(),
     install_requires=get_requirements('requirements/requirements.txt')
 )
-
-
diff --git a/tests/test_middlewares.py b/tests/test_middlewares.py
index fe365e4..ce736f7 100644
--- a/tests/test_middlewares.py
+++ b/tests/test_middlewares.py
@@ -4,7 +4,6 @@
 
 from scrapy import Request
 from scrapy.crawler import Crawler
-
 from scrapy_selenium.http import SeleniumRequest
 from scrapy_selenium.middlewares import SeleniumMiddleware
 