Skip to content

Commit dadd513

Browse files
Changed some dynamic checks, removed multiple downloading processes
Changed some dynamic checks, removed the multiple-downloading-processes option, and restricted the maximum number of connections to the server. The site changed its implementation: it now rewrites the hostname of the CDN subdomain at runtime using JavaScript (probably to trick scripts), so files weren't being downloaded. Before: https://mountainoservoo002.animecdn.com/Black-Clover/Black-Clover-Episode-2-1080p.mp4 — After: https://mountainoservo0002.animecdn.com/Black-Clover/Black-Clover-Episode-2-1080p.mp4. The hostname replacement below fixes that issue so files download again. Also changed the connection count to 5, since the server returns an error on 16 simultaneous connections; at 5 connections the speed is actually better than at 16 — they likely throttle based on how many connections you open.
1 parent 88e729d commit dadd513

File tree

1 file changed

+16
-7
lines changed

1 file changed

+16
-7
lines changed

4anime.py

Lines changed: 16 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ def addArguments():
1515

1616
basicFuncs = parser.add_argument_group(f'Input required')
1717
basicFuncs.add_argument('-u', '--url', action="store", dest="url", default=False, help='URL of Anime to download')
18-
basicFuncs.add_argument('-p', '--processes', action="store", dest="p", default=False, help='Parallel downloading processes')
18+
# basicFuncs.add_argument('-p', '--processes', action="store", dest="p", default=False, help='Parallel downloading processes')
1919

2020
opts = parser.add_argument_group(f'Arguments')
2121
opts.add_argument('-s', '--start', action="store", dest="start", default=False, help='Where to start from? (i.e. from 100)')
@@ -80,9 +80,17 @@ def parseEpisodeLink(link):
8080
soup = BeautifulSoup(responseText, 'html.parser')
8181

8282
title = soup.find('title').text
83-
ddl = soup.find('source')['src']
8483

85-
# command = f"aria2c -s 10 -j 10 -x 16 --file-allocation=none -c -o '{title}.mp4' '{ddl}'"
84+
# They changed the implementation, and are changing the hostname of the subdomain in runtime using JS, files weren't being downloaded.
85+
# Probably to trick scripts :P
86+
87+
# Before: https://mountainoservoo002.animecdn.com/Black-Clover/Black-Clover-Episode-2-1080p.mp4
88+
# After: https://mountainoservo0002.animecdn.com/Black-Clover/Black-Clover-Episode-2-1080p.mp4
89+
90+
# The replacement below fixes that issue and proceeds with downloading of files!
91+
92+
ddl = soup.find('source')['src'].replace('mountainoservoo002', 'mountainoservo0002')
93+
8694
print(title, ddl)
8795
return(title, ddl)
8896

@@ -97,14 +105,15 @@ def downloadEpisodes(command, directory):
97105
command = ""
98106

99107
else:
100-
command = f"aria2c -s 10 -j 10 -x 16 --file-allocation=none -c -o '{fileName}' '{ddl}'"
108+
# Changing threads to 5 since the server returns error on 16 connections at once. Also, on 5 speed is better than 16, I think they limit your speed based on the connections you're making.
109+
command = f"aria2c -s 10 -j 10 -x 5 --file-allocation=none -c -o '{fileName}' '{ddl}'"
101110
print(command)
102111

103112
os.system(command)
104113

105114
def main():
106115
PPROCESSES = 10 # Parsing Processes
107-
DPROCESSES = 2 # Downloading Processes
116+
DPROCESSES = 1 # Downloading Processes
108117

109118
args, parser = addArguments()
110119

@@ -113,8 +122,8 @@ def main():
113122
bar = "-" * 70
114123
commands = []
115124

116-
if args.p:
117-
DPROCESSES = int(args.p)
125+
# if args.p:
126+
# DPROCESSES = int(args.p)
118127

119128
legitURL = checkAnimeURL(url)
120129

0 commit comments

Comments
 (0)