Commit 37590c7

fix: #1604 - West Berkshire Council

1 parent 9db75a1 commit 37590c7

File tree

1 file changed (+16, -13 lines)
uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py

Lines changed: 16 additions & 13 deletions
@@ -38,15 +38,15 @@ def parse_data(self, page: str, **kwargs) -> dict:
         # Wait for the postcode field to appear then populate it
         inputElement_postcode = WebDriverWait(driver, 30).until(
             EC.presence_of_element_located(
-                (By.ID, "FINDYOURBINDAYS_ADDRESSLOOKUPPOSTCODE")
+                (By.ID, "FINDYOURBINDAYS3WEEKLY_ADDRESSLOOKUPPOSTCODE")
             )
         )
         inputElement_postcode.send_keys(user_postcode)

         # Click search button
         findAddress = WebDriverWait(driver, 10).until(
             EC.presence_of_element_located(
-                (By.ID, "FINDYOURBINDAYS_ADDRESSLOOKUPSEARCH")
+                (By.ID, "FINDYOURBINDAYS3WEEKLY_ADDRESSLOOKUPSEARCH")
             )
         )
         findAddress.click()
@@ -56,7 +56,7 @@ def parse_data(self, page: str, **kwargs) -> dict:
                 (
                     By.XPATH,
                     ""
-                    "//*[@id='FINDYOURBINDAYS_ADDRESSLOOKUPADDRESS']//option[contains(., '"
+                    "//*[@id='FINDYOURBINDAYS3WEEKLY_ADDRESSLOOKUPADDRESS']//option[contains(., '"
                     + user_paon
                     + "')]",
                 )
@@ -66,18 +66,21 @@ def parse_data(self, page: str, **kwargs) -> dict:
         # Wait for the submit button to appear, then click it to get the collection dates
         WebDriverWait(driver, 30).until(
             EC.presence_of_element_located(
-                (By.XPATH, '//*[@id="FINDYOURBINDAYS_RUBBISHDATE"]/div')
+                (
+                    By.XPATH,
+                    '//*[@id="FINDYOURBINDAYS3WEEKLY_RUBBISHRECYCLEFOODDATE"]/div',
+                )
             )
         )
         time.sleep(2)

         soup = BeautifulSoup(driver.page_source, features="html.parser")
         soup.prettify()

-        rubbish_div = soup.find(
-            "div", {"id": "FINDYOURBINDAYS_RUBBISHDATE_OUTERDIV"}
+        rubbish_div = soup.find("div", {"class": "rubbish_collection_difs_black"})
+        rubbish_date = rubbish_div.find(
+            "div", {"class": "rubbish_date_container_left_datetext"}
         )
-        rubbish_date = rubbish_div.find_all("div")[2]
         if rubbish_date.text == "Today":
             rubbish_date = datetime.now()
         else:
@@ -86,10 +89,10 @@ def parse_data(self, page: str, **kwargs) -> dict:
                 "%A %d %B",
             ).replace(year=datetime.now().year)

-        recycling_div = soup.find(
-            "div", {"id": "FINDYOURBINDAYS_RECYCLINGDATE_OUTERDIV"}
+        recycling_div = soup.find("div", {"class": "rubbish_collection_difs_green"})
+        recycling_date = recycling_div.find(
+            "div", {"class": "rubbish_date_container_left_datetext"}
         )
-        recycling_date = recycling_div.find_all("div")[2]
         if recycling_date.text == "Today":
             recycling_date = datetime.now()
         else:
@@ -98,10 +101,10 @@ def parse_data(self, page: str, **kwargs) -> dict:
                 "%A %d %B",
             ).replace(year=datetime.now().year)

-        food_div = soup.find(
-            "div", {"id": "FINDYOURBINDAYS_FOODWASTEDATE_OUTERDIV"}
+        food_div = soup.find("div", {"class": "rubbish_collection_difs_purple"})
+        food_date = food_div.find(
+            "div", {"class": "rubbish_date_container_left_datetext"}
         )
-        food_date = food_div.find_all("div")[2]
         if food_date.text == "Today":
             food_date = datetime.now()
         else:
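
For reference, here is a minimal standalone sketch of the class-based lookup pattern the new selectors rely on: outer div by colour class, inner date-text div by its class, then the "Today" / "%A %d %B" fallback. The HTML snippet and variable names are illustrative assumptions, not taken from the live West Berkshire page.

    from datetime import datetime

    from bs4 import BeautifulSoup

    # Illustrative markup reusing the class names targeted by the new selectors;
    # the council's real page structure is assumed here, not verified.
    sample_html = """
    <div class="rubbish_collection_difs_black">
        <div class="rubbish_date_container_left_datetext">Friday 12 January</div>
    </div>
    """

    soup = BeautifulSoup(sample_html, features="html.parser")

    # Outer div by colour class, then the inner date-text div by its class.
    rubbish_div = soup.find("div", {"class": "rubbish_collection_difs_black"})
    rubbish_date = rubbish_div.find(
        "div", {"class": "rubbish_date_container_left_datetext"}
    )

    if rubbish_date.text == "Today":
        parsed = datetime.now()
    else:
        # The page omits the year, so the current year is substituted.
        parsed = datetime.strptime(rubbish_date.text, "%A %d %B").replace(
            year=datetime.now().year
        )

    print(parsed.date())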
