49 changes: 40 additions & 9 deletions web_programming/world_covid19_stats.py
@@ -2,7 +2,7 @@

"""
Provide the current worldwide COVID-19 statistics.
This data is being scrapped from 'https://www.worldometers.info/coronavirus/'.
This data is being scraped from 'https://www.worldometers.info/coronavirus/'.
"""

import requests
@@ -11,16 +11,47 @@

def world_covid19_stats(url: str = "https://www.worldometers.info/coronavirus") -> dict:
"""
Return a dict of current worldwide COVID-19 statistics
Return a dictionary of current worldwide COVID-19 statistics.

The function scrapes COVID-19 statistics from the Worldometer website.
It returns key metrics such as total cases, deaths, and recoveries.

:param url: URL of the website to scrape data from (default is Worldometer COVID-19 page).

Check failure on line 19 (GitHub Actions / ruff): web_programming/world_covid19_stats.py:19:89: E501 Line too long (94 > 88)
:return: A dictionary containing the key COVID-19 statistics.
If the request or scraping fails, an empty dictionary is returned instead of raising.
"""
soup = BeautifulSoup(requests.get(url, timeout=10).text, "html.parser")
keys = soup.findAll("h1")
values = soup.findAll("div", {"class": "maincounter-number"})
keys += soup.findAll("span", {"class": "panel-title"})
values += soup.findAll("div", {"class": "number-table-main"})
return {key.text.strip(): value.text.strip() for key, value in zip(keys, values)}
try:
# Make a GET request to the URL and create a BeautifulSoup object
response = requests.get(url, timeout=10)
response.raise_for_status() # Check for request errors
soup = BeautifulSoup(response.text, "html.parser")

# Extract the keys (labels) and values (statistics)
keys = soup.findAll("h1")
values = soup.findAll("div", {"class": "maincounter-number"})
keys += soup.findAll("span", {"class": "panel-title"})
values += soup.findAll("div", {"class": "number-table-main"})

# Create and return a dictionary of COVID-19 statistics
return {
key.text.strip(): value.text.strip() for key, value in zip(keys, values)
}

except requests.RequestException as e:
print(f"Error fetching data from {url}: {e}")
return {}
except Exception as e:

Check failure on line 43 (GitHub Actions / ruff): web_programming/world_covid19_stats.py:43:12: BLE001 Do not catch blind exception: `Exception`
print(f"An error occurred during scraping: {e}")
return {}


if __name__ == "__main__":
print("\033[1m COVID-19 Status of the World \033[0m\n")
print("\n".join(f"{key}\n{value}" for key, value in world_covid19_stats().items()))

stats = world_covid19_stats()

# If stats is empty, inform the user that something went wrong
if stats:
print("\n".join(f"{key}\n{value}" for key, value in stats.items()))
else:
print("Could not retrieve the COVID-19 statistics.")
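
Since the failure path now returns an empty dict instead of raising, both paths can be exercised without touching worldometers.info by patching requests.get. A hedged test sketch, assuming the module is importable as web_programming.world_covid19_stats; the import path and the HTML below are assumptions, not part of the PR.

# Hedged test sketch; module path and HTML are assumptions, not part of this PR.
from unittest.mock import MagicMock, patch

import requests

from web_programming.world_covid19_stats import world_covid19_stats

fake = MagicMock()
fake.text = '<h1>Coronavirus Cases:</h1><div class="maincounter-number">1,000</div>'
fake.raise_for_status.return_value = None

# Happy path: the patched response is parsed and one key/value pair comes back.
with patch("requests.get", return_value=fake):
    assert world_covid19_stats() == {"Coronavirus Cases:": "1,000"}

# Failure path: the request error is caught and an empty dict is returned.
with patch("requests.get", side_effect=requests.RequestException("network down")):
    assert world_covid19_stats() == {}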