|
13 | 13 | python_jobs = results.find_all( |
14 | 14 | "h2", string=lambda text: "python" in text.lower() |
15 | 15 | ) |
16 | | -python_job_elems = [h2_elem.parent.parent.parent for h2_elem in python_jobs] |
17 | | -for p_job in python_job_elems: |
18 | | - title_elem = p_job.find("h2", class_="title") |
19 | | - link_url = p_job.find_all("a")[1]["href"] |
20 | | - print(title_elem.text.strip()) |
21 | | - print(f"Apply here: {link_url}\n") |
| 16 | +python_job_elements = [ |
| 17 | + h2_element.parent.parent.parent for h2_element in python_jobs |
| 18 | +] |
22 | 19 |
|
23 | | -# Print out all available jobs from the scraped webpage |
24 | | -print("ALL JOBS\n==============================\n") |
25 | | -job_elems = results.find_all("div", class_="card-content") |
26 | | -for job_elem in job_elems: |
27 | | - title_elem = job_elem.find("h2", class_="title") |
28 | | - company_elem = job_elem.find("h3", class_="company") |
29 | | - location_elem = job_elem.find("p", class_="location") |
30 | | - print(title_elem.text.strip()) |
31 | | - print(company_elem.text.strip()) |
32 | | - print(location_elem.text.strip()) |
| 20 | +for job_element in python_job_elements: |
| 21 | + title_element = job_element.find("h2", class_="title") |
| 22 | + company_element = job_element.find("h3", class_="company") |
| 23 | + location_element = job_element.find("p", class_="location") |
| 24 | + print(title_element.text.strip()) |
| 25 | + print(company_element.text.strip()) |
| 26 | + print(location_element.text.strip()) |
| 27 | + link_url = job_element.find_all("a")[1]["href"] |
| 28 | + print(f"Apply here: {link_url}\n") |
33 | 29 | print() |