# NN-Downloader entry module: console setup, global state, and the startup logo.
#
# Explicit imports instead of "from modules import *": the wildcard hides that
# this file depends on Config_Manager and AutoUpdate in addition to the site
# fetchers, and pollutes the module namespace (PEP 8: never `import *`).
from modules import (
    E621,
    E926,
    RULE34,
    FURBOORU,
    Multporn,
    Yiffer,
    Luscious,
    ProxyScraper,
    Config_Manager,
    AutoUpdate,
)
import json
import os
from termcolor import colored
from ctypes import windll  # Windows-only; used to set the console window title
from time import sleep
from sys import exit
import inquirer

version = "1.4.0"
windll.kernel32.SetConsoleTitleW(f"NN-Downloader | v{version}")

# Shared mutable state: filled in-place by ProxyScraper.Scraper() when the
# "proxies" config option is enabled.
proxy_list = []
# User-Agent sent with every API request, as requested by the booru APIs.
header = {"User-Agent": f"nn-downloader/{version} (by Official Husko on GitHub)"}
# Folders created at startup if missing: "db" holds one-time-download
# databases, "media" receives the downloaded files.
needed_folders = ["db", "media"]
# Database files touched when oneTimeDownload is enabled.
database_list = ["e621.db"]

# Render the version blinking when an "outdated" marker file exists
# (presumably dropped by AutoUpdate.Checker — TODO confirm).
if os.path.exists("outdated"):
    version_for_logo = colored(f"v{version}", "cyan", attrs=["blink"])
else:
    version_for_logo = colored(f"v{version}", "cyan")

logo = f"""{colored(f'''
 d8b db d8b db d8888b. .d88b. db d8b db d8b db db .d88b. .d8b. d8888b. d88888b d8888b.
 888o 88 888o 88 88 `8D .8P Y8. 88 I8I 88 888o 88 88 .8P Y8. d8' `8b 88 `8D 88' 88 `8D
 88V8o 88 88V8o 88 88 88 88 88 88 I8I 88 88V8o 88 88 88 88 88ooo88 88 88 88ooooo 88oobY'
 88 V8o88 88 V8o88 C8888D 88 88 88 88 Y8 I8I 88 88 V8o88 88 88 88 88~~~88 88 88 88~~~~~ 88`8b
 88 V888 88 V888 88 .8D `8b d8' `8b d8'8b d8' 88 V888 88booo. `8b d8' 88 88 88 .8D 88. 88 `88.
 VP V8P VP V8P Y8888D' `Y88P' `8b8' `8d8' VP V8P Y88888P `Y88P' YP YP Y8888D' Y88888P 88 YD
                {version_for_logo} | by {colored("Official-Husko", "yellow")}''', "red")}
"""
1431
1532class Main ():
1633 def main_startup ():
1734 os .system ("cls" )
18- print (colored ("======================================================================================================================" , "red" ))
19- print (colored ("| |" , "red" ))
20- print (colored ("| " + colored ("Product: " , "white" ) + colored ("NN-Downloader" , "green" ) + colored (" |" , "red" ), "red" ))
21- print (colored ("| " + colored ("Version: " , "white" ) + colored (version , "green" ) + colored (" |" , "red" ), "red" ))
22- print (colored ("| " + colored ("Description: " , "white" ) + colored ("Download Naughty images fast from multiple sites." , "green" ) + colored (" |" , "red" ), "red" ))
23- print (colored ("| |" , "red" ))
24- print (colored ("======================================================================================================================" , "red" ))
35+ print (logo )
2536 print ("" )
2637
27- # Check if media folder exists else create it
28- if not os .path .exists ("media" ):
29- os .mkdir ("media" )
38+ # Check if needed folders exists else create them
39+ for folder in needed_folders :
40+ if not os .path .exists (folder ):
41+ os .mkdir (folder )
3042
31- # Check if config exists and read it
32- if os .path .exists ("config.json" ):
33- with open ("config.json" ) as cf :
34- config = json .load (cf )
35- user_proxies = config ["proxies" ]
36- user_OTD = config ["oneTimeDownload" ]
37- user_blacklist = config ["blacklisted_tags" ]
38- user_blocked_formats = config ["blacklisted_formats" ]
3943
40- # Create a new config with default values
44+ if os .path .exists ("config.json" ):
45+ config = Config_Manager .reader ()
46+ oneTimeDownload = config ["oneTimeDownload" ]
47+ use_proxies = config ["proxies" ]
48+ checkForUpdates = config ["checkForUpdates" ]
4149 else :
42- default_config = {
43- "proxies" : "true" ,
44- "oneTimeDownload" : "true" ,
45- "user_credentials" : {
46- "e621" : {
47- "apiUser" : "" ,
48- "apiKey" : ""
49- },
50- "e926" : {
51- "apiUser" : "" ,
52- "apiKey" : ""
53- },
54- "rule34" : {
55- "user_id" : "" ,
56- "pass_hash" : "" ,
57- "comment" : "currently not used"
58- },
59- "yiffer" : {
60- "username" : "" ,
61- "email" : "" ,
62- "id" : "" ,
63- "comment" : "currently not used"
64- },
65- "yiffgallery" : {
66- "pwg_id" : "" ,
67- "comment" : "currently not used"
68- },
69- "furbooru" : {
70- "apiKey" : ""
71- }
72- },
73- "blacklisted_tags" : [
74- "example1" ,
75- "example2"
76- ],
77- "blacklisted_formats" : [
78- "example1" ,
79- "example2"
80- ]
81- }
82- with open ("config.json" , "w" ) as cc :
83- json .dump (default_config , cc , indent = 6 )
84- cc .close ()
50+ config = Config_Manager .creator ()
8551 print (colored ("New Config file generated. Please configure it for your use case and add API keys for needed services." , "green" ))
8652 sleep (7 )
8753 exit (0 )
8854
89- if user_proxies == True :
55+ if checkForUpdates == True :
56+ os .system ("cls" )
57+ print (logo )
58+ print ("" )
59+ print (colored ("Checking for Updates..." , "yellow" ), end = '\r ' )
60+ AutoUpdate .Checker ()
61+ os .system ("cls" )
62+ print (logo )
63+ print ("" )
64+
65+ if use_proxies == True :
9066 print (colored ("Fetching Fresh Proxies..." , "yellow" ), end = '\r ' )
9167 ProxyScraper .Scraper (proxy_list = proxy_list )
9268 print (colored (f"Fetched { len (proxy_list )} Proxies. " , "green" ))
9369 print ("" )
9470
71+ if oneTimeDownload == True :
72+ for database in database_list :
73+ with open (f"db/{ database } " , "a" ) as db_creator :
74+ db_creator .close ()
75+
9576 print (colored ("What site do you want to download from?" , "green" ))
96- site = input ( ">> " ). lower ()
97- if site == "" :
98- print ( colored ( "Please enter a site." , "red" ))
99- sleep ( 3 )
100- Main . main_startup ( )
77+ questions = [
78+ inquirer . List ( 'selection' ,
79+ choices = [ 'E621' , 'E926' , 'Furbooru' , 'Luscious' , 'Multporn' , 'Rule34' , 'Yiffer' ]),
80+ ]
81+ answers = inquirer . prompt ( questions )
10182 print ("" )
10283
84+ site = answers .get ("selection" ).lower ()
85+
10386 if site in ["multporn" , "yiffer" , "luscious" ]:
10487 pass
10588 else :
10689 print (colored ("Please enter the tags you want to use" , "green" ))
10790 user_tags = input (">> " ).lower ()
108- if user_tags == "" :
91+ while user_tags == "" :
10992 print (colored ("Please enter the tags you want." , "red" ))
11093 sleep (3 )
111- Main . main_startup ()
94+ user_tags = input ( ">> " ). lower ()
11295 print ("" )
11396
114- print (colored ("How many pages would you like to get?" , "green" ), " (leave empty for max)" )
97+ print (colored ("How many pages would you like to get?" , "green" ), colored ( " (leave empty for max)" , "yellow" ) )
11598 max_sites = input (">> " ).lower ()
11699 print ("" )
117100
118101 if site == "e621" :
119102 apiUser = config ["user_credentials" ]["e621" ]["apiUser" ]
120103 apiKey = config ["user_credentials" ]["e621" ]["apiKey" ]
104+ if oneTimeDownload == True :
105+ with open ("db/e621.db" , "r" ) as db_reader :
106+ database = db_reader .read ().splitlines ()
121107 if apiKey == "" or apiUser == "" :
122108 print (colored ("Please add your Api Key into the config.json" , "red" ))
123- sleep (3 )
109+ sleep (5 )
124110 else :
125- E621 .Fetcher (user_tags = user_tags , user_blacklist = user_blacklist , proxy_list = proxy_list , max_sites = max_sites , user_proxies = user_proxies , apiUser = apiUser , apiKey = apiKey , header = header )
111+ E621 .Fetcher (user_tags = user_tags , user_blacklist = config [ "blacklisted_tags" ] , proxy_list = proxy_list , max_sites = max_sites , user_proxies = config [ "proxies" ] , apiUser = apiUser , apiKey = apiKey , header = header , db = database )
126112 elif site == "e926" :
127113 apiUser = config ["user_credentials" ]["e926" ]["apiUser" ]
128114 apiKey = config ["user_credentials" ]["e926" ]["apiKey" ]
115+ if oneTimeDownload == True :
116+ with open ("db/e621.db" , "r" ) as db_reader :
117+ database = db_reader .read ().splitlines ()
129118 if apiKey == "" or apiUser == "" :
130119 print (colored ("Please add your Api Key into the config.json" , "red" ))
131- sleep (3 )
120+ sleep (5 )
132121 else :
133- E926 .Fetcher (user_tags = user_tags , user_blacklist = user_blacklist , proxy_list = proxy_list , max_sites = max_sites , user_proxies = user_proxies , apiUser = apiUser , apiKey = apiKey , header = header )
122+ E926 .Fetcher (user_tags = user_tags , user_blacklist = config [ "blacklisted_tags" ] , proxy_list = proxy_list , max_sites = max_sites , user_proxies = config [ "proxies" ] , apiUser = apiUser , apiKey = apiKey , header = header , db = database )
134123 elif site == "rule34" :
135- RULE34 .Fetcher (user_tags = user_tags , user_blacklist = user_blacklist , proxy_list = proxy_list , max_sites = max_sites , user_proxies = user_proxies , header = header )
124+ RULE34 .Fetcher (user_tags = user_tags , user_blacklist = config [ "blacklisted_tags" ] , proxy_list = proxy_list , max_sites = max_sites , user_proxies = config [ "proxies" ] , header = header )
136125 elif site == "furbooru" :
137126 apiKey = config ["user_credentials" ]["furbooru" ]["apiKey" ]
138127 if apiKey == "" :
139128 print (colored ("Please add your Api Key into the config.json" , "red" ))
140- sleep (3 )
129+ sleep (5 )
141130 else :
142- FURBOORU .Fetcher (user_tags = user_tags , user_blacklist = user_blacklist , proxy_list = proxy_list , max_sites = max_sites , user_proxies = user_proxies , apiKey = apiKey , header = header )
131+ FURBOORU .Fetcher (user_tags = user_tags , user_blacklist = config [ "blacklisted_tags" ] , proxy_list = proxy_list , max_sites = max_sites , user_proxies = config [ "proxies" ] , apiKey = apiKey , header = header )
143132 elif site == "multporn" :
144133 print (colored ("Please enter the link. (e.g. https://multporn.net/comics/double_trouble_18)" , "green" ))
145134 URL = input (">> " )
146- Multporn .Fetcher (proxy_list = proxy_list , user_proxies = user_proxies , header = header , URL = URL )
135+ while URL == "" :
136+ print (colored ("Please enter a valid link." , "red" ))
137+ sleep (1.5 )
138+ URL = input (">> " )
139+ Multporn .Fetcher (proxy_list = proxy_list , user_proxies = config ["proxies" ], header = header , URL = URL )
147140 elif site == "yiffer" :
148141 print (colored ("Please enter the link. (e.g. https://yiffer.xyz/Howl & Jasper)" , "green" ))
149142 URL = input (">> " )
150- Yiffer .Fetcher (proxy_list = proxy_list , user_proxies = user_proxies , header = header , URL = URL )
143+ while URL == "" :
144+ print (colored ("Please enter a valid link." , "red" ))
145+ sleep (1.5 )
146+ URL = input (">> " )
147+ Yiffer .Fetcher (proxy_list = proxy_list , user_proxies = config ["proxies" ], header = header , URL = URL )
151148 elif site == "luscious" :
152149 print (colored ("Please enter the link. (e.g. https://www.luscious.net/albums/bifurcation-ongoing_437722)" , "green" ))
153150 URL = input (">> " )
154- Luscious .Fetcher (proxy_list = proxy_list , user_proxies = user_proxies , header = header , URL = URL )
151+ while URL == "" :
152+ print (colored ("Please enter a valid link." , "red" ))
153+ sleep (1.5 )
154+ URL = input (">> " )
155+ Luscious .Fetcher (proxy_list = proxy_list , user_proxies = config ["proxies" ], header = header , URL = URL )
155156
156157
157158 else :
@@ -161,4 +162,15 @@ def main_startup():
161162 Main .main_startup ()
162163
if __name__ == '__main__':
    try:
        Main.main_startup()
    except KeyboardInterrupt:
        # Exit gracefully on Ctrl+C instead of dumping a traceback; brief
        # pause so the message is readable before the console closes.
        print("User Cancelled")
        sleep(3)
        exit(0)


# Converted from a bare triple-quoted string: a module-trailing string literal
# is a no-op expression statement, not a comment — use a real comment for TODOs.
# TODO: fix luscious being broken
0 commit comments