import pandas as pd

# %% Load the list of popular websites; first CSV column is the index.
websites = pd.read_csv("resources/popular_websites.csv", index_col=0)
print(websites)
|
# %% Define function to check connection
def check_connection(name, url):
    """Report whether *name*, served at *url*, answers an HTTP request.

    Prints a status line and returns True when the request succeeds
    (noting any redirect that leads away from *url*); prints a failure
    message and returns False when no connection can be established.
    """
    try:
        reply = httpx.get(url)
    except httpx.ConnectError:
        print(f"Failed to establish a connection with {url}")
        return False
    # A Location header pointing somewhere outside *url* means we were
    # redirected to a different site; same-prefix (or absent) is "online".
    redirect_target = reply.headers.get("location")
    if redirect_target is not None and not redirect_target.startswith(url):
        print(f"{name} is online! But redirects to {redirect_target}")
    else:
        print(f"{name} is online!")
    return True
18 | 22 |
|
19 | 23 |
|
# %% Use .itertuples() to iterate through all rows as namedtuples
for row in websites.itertuples():
    check_connection(row.name, row.url)
23 | 27 |
|
# %% Use .iterrows() to iterate through all rows as (index, Series) pairs
for _, row in websites.iterrows():
    check_connection(row["name"], row["url"])
27 | 31 |
|
# %% Use a list comprehension to iterate through all rows
# (shown only for comparison — a plain loop is preferred for side effects)
[check_connection(row.name, row.url) for row in websites.itertuples()]
30 | 37 |
|
# %% Use the index to iterate through rows
# BUG FIX: the original fed label values from ``websites.index`` into the
# positional ``.iloc`` accessor, which raises IndexError (or silently picks
# the wrong row) whenever the index is not 0, 1, 2, ...  Label lookups
# belong to ``.loc``.
for label in websites.index:
    # ``.loc[label]`` yields the row as a Series; ``{**series}`` turns the
    # column -> value pairs into a plain dict for printing.
    print({**websites.loc[label]})
34 | 41 |
|
# %% Transpose, then cast to a dict of row-dicts to iterate through rows
for record in websites.T.to_dict().values():
    print(record)
38 | 45 |
|
# %% Sum every column with the (identical) .agg alias of .aggregate
websites.agg(["sum"])
0 commit comments