
Commit b7e0dda

Bump pylint from 2.3.1 to 2.5.3 (#36)
* Bump pylint from 2.3.1 to 2.5.3

Bumps [pylint](https://github.com/PyCQA/pylint) from 2.3.1 to 2.5.3.
- [Release notes](https://github.com/PyCQA/pylint/releases)
- [Changelog](https://github.com/PyCQA/pylint/blob/master/ChangeLog)
- [Commits](pylint-dev/pylint@pylint-2.3.1...pylint-2.5.3)

Signed-off-by: dependabot-preview[bot] <[email protected]>

* Fix linter errors

Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com>
Co-authored-by: Clementine Urquizar <[email protected]>
1 parent da5be88 commit b7e0dda
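
Taken together, the "Fix linter errors" changes below apply two recurring patterns: every call to the exit() builtin becomes sys.exit() (with a matching import sys added at the top of the module), and imports that previously lived inside function bodies are hoisted to module level. The first pattern is presumably driven by pylint 2.5's consider-using-sys-exit check (R1722): the exit() builtin is injected by the site module and intended for interactive use, so it can be missing when a script runs under python -S, while sys.exit() is always available and raises the same SystemExit. A minimal sketch of the pattern, reusing an exit-code constant name from the diff with a hypothetical value:

import sys

EXIT_CODE_NO_RECORD = 3  # constant name from the diff; value is hypothetical

def finish(nb_hits):
    if nb_hits == 0:
        # sys.exit() raises SystemExit(code), exactly like the exit()
        # builtin, but without depending on the site module being loaded.
        sys.exit(EXIT_CODE_NO_RECORD)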

10 files changed (+25, -18 lines)

10 files changed

+25
-18
lines changed

Pipfile

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@ meilisearch = "==0.11.0"
 requests-iap = "==0.2.0"
 
 [dev-packages]
-pylint = "==2.3.1"
+pylint = "==2.5.3"
 
 [requires]
 python_version = "3.6"

Pipfile.lock

Lines changed: 11 additions & 4 deletions
(Generated file; diff not rendered by default.)

scraper/src/config/config_loader.py

Lines changed: 2 additions & 1 deletion
@@ -9,6 +9,7 @@
 from distutils.util import strtobool
 import json
 import os
+import sys
 import copy
 
 from .config_validator import ConfigValidator
@@ -104,7 +105,7 @@ def _load_config(self, config):
             return data
         except ValueError:
             raise ValueError('CONFIG is not a valid JSON')
-            exit(EXIT_CODE_WRONG_CONFIG)
+            sys.exit(EXIT_CODE_WRONG_CONFIG)
 
     def _parse(self):
         # Parse Env

scraper/src/documentation_spider.py

Lines changed: 2 additions & 1 deletion
@@ -10,6 +10,7 @@
 from scrapy.spiders.sitemap import regex
 import re
 import os
+import sys
 
 # End of import for the sitemap behavior
 
@@ -156,7 +157,7 @@ def add_records(self, response, from_sitemap):
             self.reason_to_stop = "Too much hits, Docs-Scraper only handle {} records".format(
                 int(self.nb_hits_max))
             raise ValueError(self.reason_to_stop)
-            exit(EXIT_CODE_EXCEEDED_RECORDS)
+            sys.exit(EXIT_CODE_EXCEEDED_RECORDS)
 
     def parse_from_sitemap(self, response):
         if self.reason_to_stop is not None:

scraper/src/helpers.py

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,7 @@
+import json
+import unicodedata
 from builtins import input
 from cssselect import HTMLTranslator
-import json
 
 
 def confirm(message="Confirm"):
@@ -29,7 +30,6 @@ def is_number(s):
         pass
 
     try:
-        import unicodedata
         unicodedata.numeric(s)
         return True
     except (TypeError, ValueError):
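
The helpers.py hunks above show the second pattern: import json moves up from below the third-party imports and import unicodedata moves out of is_number()'s body, presumably to satisfy pylint's import-outside-toplevel check (C0415, added in pylint 2.4). A rough before/after sketch of the is_number() change, with renamed functions for illustration:

# Before: the import runs on every call and pylint flags it (C0415).
def is_number_before(s):
    try:
        import unicodedata
        unicodedata.numeric(s)
        return True
    except (TypeError, ValueError):
        return False

# After: imported once at module scope, as in the updated helpers.py.
import unicodedata

def is_number_after(s):
    try:
        unicodedata.numeric(s)
        return True
    except (TypeError, ValueError):
        return False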

scraper/src/index.py

Lines changed: 2 additions & 1 deletion
@@ -2,6 +2,7 @@
 Docs-scraper main entry point
 """
 import os
+import sys
 import json
 import requests
 from requests_iap import IAPAuth
@@ -109,7 +110,7 @@ def run_config(config):
     else:
         print('Crawling issue: nbHits 0 for ' + config.index_uid)
         # meilisearch_helper.report_crawling_issue()
-        exit(EXIT_CODE_NO_RECORD)
+        sys.exit(EXIT_CODE_NO_RECORD)
     print("")
 
 
scraper/src/strategies/default_strategy.py

Lines changed: 2 additions & 1 deletion
@@ -10,6 +10,7 @@
 from ..helpers import to_json
 import json
 import hashlib
+import sys
 
 
 class DefaultStrategy(AbstractStrategy):
@@ -63,7 +64,7 @@ def _update_record_with_global_content(self, record, levels):
     def get_records_from_dom(self, current_page_url=None):
 
         if self.dom is None:
-            exit('DefaultStrategy.dom is not defined')
+            sys.exit('DefaultStrategy.dom is not defined')
 
         # Reset it to be able to have a clean instance when testing
         self.global_content = {}
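
One nuance in this hunk: sys.exit() is passed a string rather than an integer. Like the exit() builtin it replaces, sys.exit() raises SystemExit; when the argument is not an integer, Python prints it to stderr and the process terminates with status 1. A small self-contained sketch of that behavior:

import sys

try:
    sys.exit('DefaultStrategy.dom is not defined')
except SystemExit as err:
    # The message travels on the exception; uncaught, it would be
    # printed to stderr and the exit status would be 1.
    print(err.code)  # -> DefaultStrategy.dom is not defined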

scraper/src/tests/config_loader/get_extra_facets_test.py

Lines changed: 1 addition & 4 deletions
@@ -1,7 +1,7 @@
 # coding: utf-8
 from ...config.config_loader import ConfigLoader
 from .abstract import config
-
+from .mocked_init import MockedInit
 
 class TestGetExtraFacets:
     def test_extra_facets_should_be_empty_by_default(self):
@@ -13,7 +13,6 @@ def test_extra_facets_should_be_empty_by_default(self):
 
     def test_extra_facets_should_be_set_from_start_urls_variables_browser(self,
                                                                           monkeypatch):
-        from .mocked_init import MockedInit
         monkeypatch.setattr("selenium.webdriver.chrome",
                             lambda x: MockedInit())
         monkeypatch.setattr("time.sleep", lambda x: "")
@@ -37,7 +36,6 @@ def test_extra_facets_should_be_set_from_start_urls_variables_browser(self,
 
     def test_extra_facets_should_be_set_from_start_urls_variables_with_two_start_url_browser(
             self, monkeypatch):
-        from .mocked_init import MockedInit
         monkeypatch.setattr("selenium.webdriver.chrome",
                             lambda x: MockedInit())
         monkeypatch.setattr("time.sleep", lambda x: "")
@@ -67,7 +65,6 @@ def test_extra_facets_should_be_set_from_start_urls_variables_with_two_start_url
 
     def test_extra_facets_should_be_set_from_start_urls_variables_with_multiple_tags_browser(
             self, monkeypatch):
-        from .mocked_init import MockedInit
         monkeypatch.setattr("selenium.webdriver.chrome",
                             lambda x: MockedInit())
         monkeypatch.setattr("time.sleep", lambda x: "")

scraper/src/tests/config_loader/open_selenium_browser_test.py

Lines changed: 1 addition & 2 deletions
@@ -2,6 +2,7 @@
 from ...config.config_loader import ConfigLoader
 from ...config.browser_handler import BrowserHandler
 from .abstract import config
+from .mocked_init import MockedInit
 
 
 class TestOpenSeleniumBrowser:
@@ -14,7 +15,6 @@ def test_browser_not_needed_by_default(self):
                 actual.js_render) is False
 
     def test_browser_needed_when_js_render_true(self, monkeypatch):
-        from .mocked_init import MockedInit
         monkeypatch.setattr("selenium.webdriver.chrome",
                             lambda x: MockedInit())
         monkeypatch.setattr("time.sleep", lambda x: "")
@@ -30,7 +30,6 @@ def test_browser_needed_when_js_render_true(self, monkeypatch):
 
     def test_browser_needed_when_config_contains_automatic_tag(self,
                                                                monkeypatch):
-        from .mocked_init import MockedInit
         monkeypatch.setattr("selenium.webdriver.chrome",
                             lambda x: MockedInit())
         monkeypatch.setattr("time.sleep", lambda x: "")

scraper/src/tests/config_loader/start_urls_test.py

Lines changed: 1 addition & 1 deletion
@@ -3,6 +3,7 @@
 
 from ...config.config_loader import ConfigLoader
 from .abstract import config
+from .mocked_init import MockedInit
 
 
 class TestStartUrls:
@@ -70,7 +71,6 @@ def test_start_url_should_be_transform_to_object_if_string(self):
 
     def test_start_urls_should_be_generated_when_there_is_automatic_tagging_browser(
             self, monkeypatch):
-        from .mocked_init import MockedInit
         monkeypatch.setattr("selenium.webdriver.chrome",
                             lambda x: MockedInit())
         monkeypatch.setattr("time.sleep", lambda x: "")
