Commit 05ffb81

Merge pull request #838 from eugeneniemand/basildon-council

2 parents ef7cd71 + 425aeda
2 files changed (+88 -1 lines)

uk_bin_collection/tests/input.json

Lines changed: 7 additions & 1 deletion
@@ -42,7 +42,13 @@
         "wiki_name": "Barnsley Metropolitan Borough Council",
         "wiki_note": "To get the UPRN, you will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)."
     },
-    "BasingstokeCouncil": {
+    "BasildonCouncil": {
+        "skip_get_url": true,
+        "uprn": "10013350430",
+        "url": "https://basildonportal.azurewebsites.net/api/getPropertyRefuseInformation",
+        "wiki_name": "Basildon Council"
+    },
+    "BasingstokeCouncil": {
         "skip_get_url": true,
         "uprn": "100060220926",
         "url": "https://www.basingstoke.gov.uk/bincollection",
Lines changed: 81 additions & 0 deletions
@@ -0,0 +1,81 @@
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        url_base = "https://basildonportal.azurewebsites.net/api/getPropertyRefuseInformation"
+
+        uprn = kwargs.get("uprn")
+        # Check the UPRN is valid
+        check_uprn(uprn)
+
+        # JSON body expected by the endpoint
+        payload = {"uprn": uprn}
+
+        # Headers for the request
+        headers = {"Content-Type": "application/json"}
+
+        response = requests.post(url_base, data=json.dumps(payload), headers=headers)
+
+        # Initialise the list of bin collection details up front so the
+        # final return also works when the request fails
+        bins = []
+
+        # Ensure the request was successful
+        if response.status_code == 200:
+            data = response.json()
+
+            # Extract refuse information
+            available_services = data["refuse"]["available_services"]
+
+            for service_name, service_data in available_services.items():
+                # Derive a readable bin type from the container field
+                match service_data["container"]:
+                    case "Green Wheelie Bin":
+                        subscription_status = (
+                            service_data["subscription"]["active"]
+                            if service_data["subscription"]
+                            else False
+                        )
+                        type_descr = f"Green Wheelie Bin ({'Active' if subscription_status else 'Expired'})"
+                    case "N/A":
+                        type_descr = service_data["name"]
+                    case _:
+                        type_descr = service_data["container"]
+
+                date_str = service_data.get("current_collection_date")
+                # Skip services with no scheduled collection
+                if not date_str:
+                    continue
+
+                # Parse the date string into a datetime object
+                date_obj = datetime.strptime(date_str, "%Y-%m-%d")
+
+                # Convert the datetime object to the desired format
+                formatted_date = date_obj.strftime(date_format)
+
+                bins.append({
+                    "type": type_descr,
+                    "collectionDate": formatted_date,
+                })
+        else:
+            print(f"Failed to fetch data. Status code: {response.status_code}")
+
+        data = {"bins": bins}
+
+        return data
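
A hypothetical usage sketch of the new class. The import path below follows the repository's councils/ naming convention but is not shown in this diff, so treat it as an assumption; the UPRN is the one registered in input.json:

    # Hypothetical driver: the module path is assumed, not shown in this diff
    from uk_bin_collection.uk_bin_collection.councils.BasildonCouncil import CouncilClass

    council = CouncilClass()
    # parse_data ignores the page argument and makes its own POST with the
    # UPRN, so an empty string suffices here
    result = council.parse_data("", uprn="10013350430")
    for entry in result["bins"]:
        print(entry["type"], entry["collectionDate"])
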
