-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
88 lines (67 loc) · 2.47 KB
/
main.py
File metadata and controls
88 lines (67 loc) · 2.47 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
import os
import requests
from urllib.parse import urljoin, urlparse
from bs4 import BeautifulSoup
import subprocess
import mimetypes
# Site to mirror: host plus the start-page path, without the URL scheme.
SITE = "fr.wikipedia.org/wiki/Wikip%C3%A9dia:Accueil_principal"
# URLs already crawled — guards against fetching the same page twice.
visited = set()
# Absolute start URL; only links sharing this prefix are followed (see is_valid).
base_url = "https://" + SITE
# Local mirror root — every fetched file is written beneath this directory.
download_folder = "download/" + SITE
def format_file(filepath):
    """Pretty-print a downloaded web asset in place with `prettier`.

    Only ``.html``, ``.css`` and ``.js`` files are formatted; anything
    else is ignored. Formatting is best-effort: a missing `prettier`
    binary or a formatting failure is logged and never propagated, so
    it cannot abort the crawl.
    """
    if filepath.endswith(('.html', '.css', '.js')):
        try:
            # argv-list form (shell=False) — no shell injection via filepath
            subprocess.run(['prettier', '--write', filepath], check=True)
            print(f"Formatté : {filepath}")
        except FileNotFoundError:
            # prettier not installed — skip formatting instead of crashing
            print(f"prettier introuvable, fichier non formaté : {filepath}")
        except subprocess.CalledProcessError as e:
            print(f"Erreur formatage {filepath} : {e}")
def save_file(url, content):
    """Write fetched bytes to a local path mirroring the URL structure.

    The URL path is mapped under ``download_folder``. Directory-like
    paths (empty or trailing ``/``) get an ``index.html``; extensionless
    paths get an extension guessed from the URL, defaulting to ``.html``.
    ``.``/``..`` segments are dropped so a hostile URL cannot write
    outside ``download_folder`` (path traversal). Pre-existing files or
    empty directories that collide with the needed layout are removed.
    After writing, the file is handed to ``format_file``.
    """
    path = urlparse(url).path
    # Force a file name when the URL designates a directory
    if path.endswith("/") or path == "":
        path += "index.html"
    # Force an extension when absent so the mirrored file has a usable type
    if not os.path.splitext(path)[1]:
        guessed_type = mimetypes.guess_type(url)[0]
        extension = ".html" if guessed_type == "text/html" or guessed_type is None else mimetypes.guess_extension(guessed_type) or ".html"
        path += extension
    # Sanitize segments: drop empty, "." and ".." so filepath stays inside
    # download_folder even for malicious URLs like /../../etc/passwd
    parts = [p for p in path.strip("/").split("/") if p not in ("", ".", "..")]
    if not parts:
        parts = ["index.html"]
    filepath = os.path.join(download_folder, *parts)
    dirpath = os.path.dirname(filepath)
    # A plain file sitting where we need a directory: remove it
    if os.path.isfile(dirpath):
        os.remove(dirpath)
    os.makedirs(dirpath, exist_ok=True)
    # An (empty) directory bearing the target file name: remove it
    if os.path.isdir(filepath):
        os.rmdir(filepath)
    with open(filepath, "wb") as f:
        f.write(content)
    format_file(filepath)
def is_valid(url):
    """Return True when *url* belongs to the mirrored site (base_url prefix)."""
    stays_on_site = url.startswith(base_url)
    return stays_on_site
def crawl(url):
    """Depth-first crawl from *url*, mirroring every in-site resource.

    Uses an explicit stack rather than recursion, so large sites cannot
    exhaust the interpreter's recursion limit. Each request carries a
    timeout so one unresponsive host cannot hang the crawl. HTML pages
    are saved, then parsed for ``a``/``link``/``script`` references;
    in-site references are queued. Non-HTML responses are saved
    verbatim. Network errors and non-200 responses are skipped (errors
    are logged). ``visited`` (module global) is updated as pages are
    taken off the stack.
    """
    stack = [url]
    while stack:
        current = stack.pop()
        if current in visited:
            continue
        visited.add(current)
        try:
            # timeout: avoid hanging forever on a dead/slow server
            response = requests.get(current, timeout=15)
        except Exception as e:
            print(f"Erreur {e} sur {current}")
            continue
        if response.status_code != 200:
            continue
        content_type = response.headers.get("Content-Type", "")
        if "text/html" in content_type:
            save_file(current, response.content)
            soup = BeautifulSoup(response.text, "html.parser")
            for tag in soup.find_all(["a", "link", "script"]):
                # <a>/<link> carry href, <script> carries src
                href = tag.get("href") or tag.get("src")
                if href:
                    full_url = urljoin(current, href)
                    if is_valid(full_url) and full_url not in visited:
                        stack.append(full_url)
        else:
            save_file(current, response.content)
if __name__ == "__main__":
    # Ensure the mirror root exists, then start crawling from the start page.
    os.makedirs(download_folder, exist_ok=True)
    crawl(base_url)