Commit c4d026b

fix: North East Lincs
1 parent 3053200 commit c4d026b

File tree

1 file changed: +17 -6 lines changed


uk_bin_collection/uk_bin_collection/councils/NorthEastLincs.py

Lines changed: 17 additions & 6 deletions
@@ -1,5 +1,7 @@
 import pandas as pd
+import requests
 from bs4 import BeautifulSoup
+
 from uk_bin_collection.uk_bin_collection.common import date_format
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
 
@@ -12,15 +14,26 @@ class CouncilClass(AbstractGetBinDataClass):
     """
 
     def parse_data(self, page: str, **kwargs) -> dict:
-        # Make a BS4 object
-        soup = BeautifulSoup(page.text, features="html.parser")
+        user_url = kwargs.get("url")
+
+        headers = {
+            "Origin": "https://www.nelincs.gov.uk",
+            "Referer": "https://www.nelincs.gov.uk",
+            "User-Agent": "Mozilla/5.0",
+        }
+
+        # Make the GET request
+        response = requests.get(user_url, headers=headers)
+
+        # Parse the HTML
+        soup = BeautifulSoup(response.content, "html.parser")
         soup.prettify()
 
         data = {"bins": []}
 
         # Get list items that can be seen on page
         for element in soup.find_all(
-            "li", {"class": "list-group-item p-0 p-3 bin-collection-item"}
+            "li", {"class": "border-0 list-group-item p-3 bg-light rounded p-2"}
         ):
             element_text = element.text.strip().split("\n\n")
             element_text = [x.strip() for x in element_text]
@@ -35,9 +48,7 @@ def parse_data(self, page: str, **kwargs) -> dict:
             data["bins"].append(dict_data)
 
         # Get hidden list items too
-        for element in soup.find_all(
-            "li", {"class": "list-group-item p-0 p-3 bin-collection-item d-none"}
-        ):
+        for element in soup.find_all("li", {"class": "border-0 list-group-item p-3"}):
             element_text = element.text.strip().split("\n\n")
             element_text = [x.strip() for x in element_text]
 
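With this fix, parse_data no longer parses the supplied page argument: it reads the target URL from the url keyword argument, sends its own GET request with Origin, Referer and User-Agent headers, and matches the council site's updated Bootstrap list-item classes. A minimal usage sketch follows, assuming the class is imported straight from this module; the collection-schedule URL below is a placeholder, not taken from the commit.

    # Minimal sketch: the invocation path and URL are assumptions, not part of this commit.
    from uk_bin_collection.uk_bin_collection.councils.NorthEastLincs import CouncilClass

    council = CouncilClass()
    # parse_data now performs its own GET request, so the page argument is effectively unused.
    bin_data = council.parse_data(page="", url="https://www.nelincs.gov.uk/...")  # placeholder URL
    print(bin_data)  # expected shape: {"bins": [...]}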
