Skip to content

Commit 98a76be

Browse files
fixes and improvements
fix: luscious works again; fix: debug print in furbooru; feat: log if shit goes south; feat: database support for rule34 and furbooru; refactor: safer filenames
1 parent f66455b commit 98a76be

File tree

12 files changed

+600
-457
lines changed

12 files changed

+600
-457
lines changed

.gitignore

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,14 +4,10 @@ __pycache__/
44
dist/
55
build/
66
media/
7-
.nn-d/
87
.env/
98
testing_accounts.txt
10-
config.json.bak
119
old_config.json
12-
testing_accounts.txt.bak
1310
db/
1411
outdated
15-
modules/updateManager.old
1612
runtime.log
17-
delete-exe.bat
13+
config.json.dev

Build Release.bat

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1 @@
1-
".\.env\Scripts\activate" && pyinstaller --onefile --icon "icon.ico" --console --name "NN-Downloader" --upx-dir "Z:\Projects\Python\### UPX ###" --add-data="Z:/Projects/Python/NN-Downloader/.env/Lib/site-packages/grapheme/data/*;grapheme/data/" main.py
2-
3-
rmdir /s /q .\build
4-
rmdir /s /q .\__pycache__
5-
del ".\NN-Downloader.spec"
1+
".\.env\Scripts\activate" && pyinstaller --onefile --icon "icon.ico" --console --name "NN-Downloader" --upx-dir "Z:\Projects\Python\### UPX ###" --add-data=--add-data="./.env/Lib/site-packages/grapheme/data/*;grapheme/data/" main.py && rmdir /s /q .\build && rmdir /s /q .\__pycache__ && del ".\NN-Downloader.spec"

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ This project is unfinished and only works on the [listed][13] sites currently. M
1515
- [Furbooru][6] (API)
1616
- [Multporn][7]
1717
- [Yiffer][8]
18-
- [Luscious][16] ***(Currently Broken!)***
18+
- [Luscious][16]
1919

2020
#### Planned:
2121
- [YiffGallery][9]

main.py

Lines changed: 46 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -7,12 +7,13 @@
77
from sys import exit
88
import inquirer
99

10-
version = "1.4.1"
10+
version = "1.4.2"
1111
windll.kernel32.SetConsoleTitleW(f"NN-Downloader | v{version}")
1212
proxy_list = []
1313
header = {"User-Agent":f"nn-downloader/{version} (by Official Husko on GitHub)"}
1414
needed_folders = ["db", "media"]
15-
database_list = ["e621.db"]
15+
database_list = ["e621", "furbooru", "rule34"]
16+
unsafe_chars = ["/", "\\", ":", "*", "?", "\"", "<", ">", "|", "\0", "$", "#", "@", "&", "%", "!", "`", "^", "(", ")", "{", "}", "[", "]", "=", "+", "~", ",", ";"]
1617

1718
if os.path.exists("outdated"):
1819
version_for_logo = colored(f"v{version}", "cyan", attrs=["blink"])
@@ -70,13 +71,13 @@ def main_startup():
7071

7172
if oneTimeDownload == True:
7273
for database in database_list:
73-
with open(f"db/{database}", "a") as db_creator:
74+
with open(f"db/{database}.db", "a") as db_creator:
7475
db_creator.close()
7576

7677
print(colored("What site do you want to download from?", "green"))
7778
questions = [
7879
inquirer.List('selection',
79-
choices=['E621', 'E926', 'Furbooru', 'Multporn', 'Rule34', 'Yiffer']), #choices=['E621', 'E926', 'Furbooru', 'Luscious', 'Multporn', 'Rule34', 'Yiffer']),
80+
choices=['E621', 'E926', 'Furbooru', 'Luscious', 'Multporn', 'Rule34', 'Yiffer']),
8081
]
8182
answers = inquirer.prompt(questions)
8283
print("")
@@ -108,7 +109,8 @@ def main_startup():
108109
print(colored("Please add your Api Key into the config.json", "red"))
109110
sleep(5)
110111
else:
111-
E621.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], apiUser=apiUser, apiKey=apiKey, header=header, db=database)
112+
output = E621.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], apiUser=apiUser, apiKey=apiKey, header=header, db=database)
113+
112114
elif site == "e926":
113115
apiUser = config["user_credentials"]["e926"]["apiUser"]
114116
apiKey = config["user_credentials"]["e926"]["apiKey"]
@@ -119,44 +121,76 @@ def main_startup():
119121
print(colored("Please add your Api Key into the config.json", "red"))
120122
sleep(5)
121123
else:
122-
E926.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], apiUser=apiUser, apiKey=apiKey, header=header, db=database)
124+
output = E926.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], apiUser=apiUser, apiKey=apiKey, header=header, db=database)
125+
123126
elif site == "rule34":
124-
RULE34.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], header=header)
127+
if oneTimeDownload == True:
128+
with open("db/rule34.db", "r") as db_reader:
129+
database = db_reader.read().splitlines()
130+
output = RULE34.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], header=header, db=database)
131+
125132
elif site == "furbooru":
126133
apiKey = config["user_credentials"]["furbooru"]["apiKey"]
134+
if oneTimeDownload == True:
135+
with open("db/furbooru.db", "r") as db_reader:
136+
database = db_reader.read().splitlines()
127137
if apiKey == "":
128138
print(colored("Please add your Api Key into the config.json", "red"))
129139
sleep(5)
130140
else:
131-
FURBOORU.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], apiKey=apiKey, header=header)
141+
output = FURBOORU.Fetcher(user_tags=user_tags, user_blacklist=config["blacklisted_tags"], proxy_list=proxy_list, max_sites=max_sites, user_proxies=config["proxies"], apiKey=apiKey, header=header, db=database)
142+
132143
elif site == "multporn":
133144
print(colored("Please enter the link. (e.g. https://multporn.net/comics/double_trouble_18)", "green"))
134145
URL = input(">> ")
135146
while URL == "":
136147
print(colored("Please enter a valid link.", "red"))
137148
sleep(1.5)
138149
URL = input(">> ")
139-
Multporn.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
150+
output = Multporn.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
151+
140152
elif site == "yiffer":
141153
print(colored("Please enter the link. (e.g. https://yiffer.xyz/Howl & Jasper)", "green"))
142154
URL = input(">> ")
143155
while URL == "":
144156
print(colored("Please enter a valid link.", "red"))
145157
sleep(1.5)
146158
URL = input(">> ")
147-
Yiffer.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
159+
output = Yiffer.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
160+
148161
elif site == "luscious":
149162
print(colored("Please enter the link. (e.g. https://www.luscious.net/albums/bifurcation-ongoing_437722)", "green"))
150163
URL = input(">> ")
151164
while URL == "":
152165
print(colored("Please enter a valid link.", "red"))
153166
sleep(1.5)
154167
URL = input(">> ")
155-
Luscious.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
168+
output = Luscious.Fetcher(proxy_list=proxy_list, user_proxies=config["proxies"], header=header, URL=URL)
169+
170+
else:
171+
print(colored("Site not supported. Open a ticket to request support for that site!", "red"))
172+
raise Exception(f"This shouldn't be possible! User tried to download from {site}.")
173+
Main.main_startup()
174+
175+
status = output.get("status", "why no status man?")
176+
uinput = output.get("uinput", "URL overdosed :(")
177+
exception_str = output.get("exception", "Fuck me there was no exception.")
178+
extra = output.get("extra", "")
179+
180+
if status == "ok":
181+
pass
156182

183+
elif status == "error":
184+
print(f"{error} An error occured while downloading from {colored(site, 'yellow')}! Please report this. Exception: {colored(exception_str, 'red')}")
185+
error_str = f"An error occured while downloading from {site}! Please report this. Exception: {exception_str}"
186+
Logger.log_event(error_str, extra, uinput)
187+
sleep(7)
157188

158189
else:
159-
print(colored("Site not supported. Open a ticket to request support for that site!", "red"))
190+
print(f"{major_error} An unknown error occured while downloading from {colored(site, 'yellow')}! Please report this. Exception: {colored(exception_str, 'red')}")
191+
error_str = f"An unknown error occured while downloading from {site}! Please report this. Exception: {exception_str}"
192+
Logger.log_event(error_str, extra, uinput)
193+
sleep(7)
160194

161195
# Jump back to start
162196
Main.main_startup()
@@ -168,9 +202,3 @@ def main_startup():
168202
print("User Cancelled")
169203
sleep(3)
170204
exit(0)
171-
172-
173-
"""
174-
TODO: fix luscious being broken
175-
176-
"""

modules/e621.py

Lines changed: 84 additions & 69 deletions
Original file line numberDiff line numberDiff line change
@@ -7,85 +7,100 @@
77
from datetime import datetime
88
import os
99

10+
from main import unsafe_chars
1011
now = datetime.now()
1112
dt_now = now.strftime("%d-%m-%Y_%H-%M-%S")
1213

1314
class E621():
1415
def Fetcher(user_tags, user_blacklist, proxy_list, max_sites, user_proxies, apiUser ,apiKey, header, db):
15-
approved_list = []
16-
page = 1
17-
while True:
18-
URL = f"https://e621.net/posts.json?tags={user_tags}&limit=320&page={page}"
19-
if user_proxies == True:
20-
proxy = random.choice(proxy_list)
21-
req = requests.get(URL, headers=header, proxies=proxy, auth=HTTPBasicAuth(apiUser, apiKey)).json()
22-
else:
23-
req = requests.get(URL, headers=header, auth=HTTPBasicAuth(apiUser, apiKey)).json()
16+
try:
17+
approved_list = []
18+
page = 1
19+
while True:
20+
URL = f"https://e621.net/posts.json?tags={user_tags}&limit=320&page={page}"
21+
if user_proxies == True:
22+
proxy = random.choice(proxy_list)
23+
raw_req = requests.get(URL, headers=header, proxies=proxy, auth=HTTPBasicAuth(apiUser, apiKey))
24+
else:
25+
raw_req = requests.get(URL, headers=header, auth=HTTPBasicAuth(apiUser, apiKey))
2426

25-
try:
26-
if req["message"] == "You cannot go beyond page 750. Please narrow your search terms.":
27-
print(colored(req["message"] + " (API limit)", "red"))
27+
req = raw_req.json()
28+
29+
try:
30+
if req["message"] == "You cannot go beyond page 750. Please narrow your search terms.":
31+
print(colored(req["message"] + " (API limit)", "red"))
32+
sleep(5)
33+
break
34+
except:
35+
pass
36+
37+
if req["posts"] == []:
38+
print(colored("No images found or all downloaded! Try different tags.", "yellow"))
2839
sleep(5)
2940
break
30-
except:
31-
pass
32-
33-
if req["posts"] == []:
34-
print(colored("No images found or all downloaded! Try different tags.", "yellow"))
35-
sleep(5)
36-
break
3741

38-
elif page == max_sites:
39-
break
40-
41-
else:
42-
for item in req["posts"]:
43-
image_id = item["id"]
44-
image_address = item["file"]["url"]
45-
post_tags1 = item["tags"]["general"]
46-
post_tags2 = item["tags"]["species"]
47-
post_tags3 = item["tags"]["character"]
48-
post_tags4 = item["tags"]["copyright"]
49-
post_tags5 = item["tags"]["artist"]
50-
post_tags = post_tags1 + post_tags2 + post_tags3 + post_tags4 + post_tags5
51-
image_format = item["file"]["ext"]
52-
user_blacklist_lenght = len(user_blacklist)
53-
passed = 0
42+
elif page == max_sites:
43+
print(colored(f"Finished Downloading {max_sites} of {max_sites} pages.", "yellow"))
44+
sleep(5)
45+
break
46+
47+
else:
48+
for item in req["posts"]:
49+
image_id = item["id"]
50+
image_address = item["file"]["url"]
51+
post_tags1 = item["tags"]["general"]
52+
post_tags2 = item["tags"]["species"]
53+
post_tags3 = item["tags"]["character"]
54+
post_tags4 = item["tags"]["copyright"]
55+
post_tags5 = item["tags"]["artist"]
56+
post_tags = post_tags1 + post_tags2 + post_tags3 + post_tags4 + post_tags5
57+
image_format = item["file"]["ext"]
58+
user_blacklist_lenght = len(user_blacklist)
59+
passed = 0
5460

55-
for blacklisted_tag in user_blacklist:
56-
if blacklisted_tag in post_tags:
57-
break
61+
for blacklisted_tag in user_blacklist:
62+
if blacklisted_tag in post_tags:
63+
break
64+
else:
65+
passed += 1
66+
if passed == user_blacklist_lenght and str(image_id) not in db and image_address != None:
67+
image_data = {"image_address": image_address, "image_format": image_format, "image_id": image_id}
68+
approved_list.append(image_data)
5869
else:
59-
passed += 1
60-
if passed == user_blacklist_lenght and str(image_id) not in db and image_address != None:
61-
image_data = {"image_address": image_address, "image_format": image_format, "image_id": image_id}
62-
approved_list.append(image_data)
63-
else:
64-
pass
70+
pass
6571

66-
# Download Each file
67-
with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
68-
for data in approved_list:
69-
image_address = data["image_address"]
70-
image_format = data["image_format"]
71-
image_id = data["image_id"]
72-
bar.text = f'-> Downloading: {image_id}, please wait...'
73-
if user_proxies == True:
74-
proxy = random.choice(proxy_list)
75-
img_data = requests.get(image_address, proxies=proxy).content
76-
else:
77-
sleep(1)
78-
img_data = requests.get(image_address).content
79-
safe_user_tags = user_tags.replace("\\", "").replace("/", "").replace(":", "").replace("*", "").replace("?", "").replace('"', "").replace("<", "").replace(">", "").replace("|", "").replace(" ", "_")
80-
if not os.path.exists("media/" + dt_now + " " + safe_user_tags):
81-
os.mkdir("media/" + dt_now + " " + safe_user_tags)
82-
with open("media/" + dt_now + " " + safe_user_tags + "/" + str(image_id) + "." + image_format, 'wb') as handler:
83-
handler.write(img_data)
84-
with open("db/e621.db", "a") as db_writer:
85-
db_writer.write(f"{str(image_id)}\n")
86-
bar()
72+
# Download Each file
73+
with alive_bar(len(approved_list), calibrate=1, dual_line=True, title='Downloading') as bar:
74+
for data in approved_list:
75+
image_address = data["image_address"]
76+
image_format = data["image_format"]
77+
image_id = data["image_id"]
78+
bar.text = f'-> Downloading: {image_id}, please wait...'
79+
if user_proxies == True:
80+
proxy = random.choice(proxy_list)
81+
img_data = requests.get(image_address, proxies=proxy).content
82+
else:
83+
sleep(1)
84+
img_data = requests.get(image_address).content
85+
86+
safe_user_tags = user_tags.replace(" ", "_")
87+
for char in unsafe_chars:
88+
safe_user_tags = safe_user_tags.replace(char, "")
89+
90+
if not os.path.exists(f"media/{dt_now}_{safe_user_tags}"):
91+
os.mkdir(f"media/{dt_now}_{safe_user_tags}")
92+
with open(f"media/{dt_now}_{safe_user_tags}/{str(image_id)}.{image_format}", 'wb') as handler:
93+
handler.write(img_data)
94+
with open("db/e621.db", "a") as db_writer:
95+
db_writer.write(f"{str(image_id)}\n")
96+
bar()
97+
98+
print(colored(f"Page {page} Completed", "green"))
99+
approved_list.clear()
100+
page += 1
101+
sleep(5)
87102

88-
print(colored(f"Page {page} Completed", "green"))
89-
approved_list.clear()
90-
page += 1
91-
sleep(5)
103+
return {"status": "ok"}
104+
105+
except Exception as e:
106+
return {"status": "error", "uinput": user_tags, "exception": str(e), "extra": raw_req.content}

0 commit comments

Comments (0)