
Commit 56797e6

fix some lint
1 parent e958cc8 commit 56797e6

4 files changed (+63, -46)


database/.flake8

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+[flake8]
+max-line-length = 100
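The new per-directory config raises flake8's line-length limit to 100 characters for the database/ sources. A minimal sketch of how the check could be invoked so that this file is the configuration flake8 discovers, assuming flake8 is installed and run with database/ as the working directory:

```python
import subprocess

# Run flake8 from inside database/ so the local .flake8
# (max-line-length = 100) is the config that gets picked up.
result = subprocess.run(
    ["flake8", "."],
    cwd="database",
    capture_output=True,
    text=True,
    check=False,
)
print(result.stdout if result.returncode != 0 else "no lint errors")
```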

database/https.py

Lines changed: 22 additions & 27 deletions
@@ -15,15 +15,16 @@
 import get_leaf_cert
 import data_factory

+
 class AcquireLock:
     """
     Create a TCP socket to ensure a single instance.
-
-    This class creates a TCP socket that binds to a specific port. If an instance
-    of this class is already running, it will log an error and exit.
+
+    This class creates a TCP socket that binds to a specific port. If an instance
+    of this class is already running, it will log an error and exit.

     Example usage:
-
+
     ```python
     lock = AcquireLock()
     try:
@@ -65,7 +66,6 @@ def __init__(self):
         self.misconfigured_server = False
         self.leaf_cert = None

-
     def validate_url(self, url):
         """Limit to used domains."""
         allowed_domains = (
@@ -80,7 +80,6 @@ def validate_url(self, url):
             logging.error("Invalid URL domain: %s", url)
             sys.exit(1)

-
     def head(self, curl):
         """Take the curl object to head state, with redirect."""
         if isinstance(curl, pycurl.Curl):
@@ -108,7 +107,6 @@ def head(self, curl):
             return buffer.getvalue().decode('utf-8')
         return ""

-
     def validate_data_type(self, content_type):
         """Limit to used data types."""
         valid_content_types = {
@@ -118,14 +116,13 @@ def validate_data_type(self, content_type):
             'image/jpeg',
             'image/png',
             'text/html',
-            'head' # this is not MIME
+            'head'  # this is not MIME
         }

         if content_type not in valid_content_types:
             logging.error("Invalid content type: %s", content_type)
             sys.exit(1)

-
     def set_leaf(self, curl):
         """Write the certificate to a temporary file manually"""
         if self.leaf_cert is None:
@@ -141,7 +138,6 @@ def set_leaf(self, curl):
         curl.setopt(pycurl.CAINFO, temp_cert_path)
         return temp_cert_path

-
     def get_leaf(self, url):
         """Check if we need to grab the first certificate"""
         if not self.misconfigured_server:
@@ -164,7 +160,6 @@ def setup_before_get_response(self, url, content_type):
         if url.startswith("https://www.trle.net/") and not self.misconfigured_server:
             self.get_leaf(url)

-
     def get_response(self, url, content_type):
         """Handle all https requests"""
         self.setup_before_get_response(url, content_type)
@@ -217,7 +212,7 @@ def get_response(self, url, content_type):
                 break

             except pycurl.error as curl_error:
-                #if curl_error.args[0] == 60: # SSL certificate error
+                # if curl_error.args[0] == 60:  # SSL certificate error
                 logging.error("Request failed: %s", curl_error)
                 retries += 1
                 if retries >= max_retries:
@@ -230,7 +225,6 @@ def get_response(self, url, content_type):

         return self.close_response(curl, headers, response_buffer, content_type)

-
     def close_response(self, curl, headers, response_buffer, content_type):
         """Pack response and close curl"""
         if curl is None:
@@ -251,11 +245,10 @@ def close_response(self, curl, headers, response_buffer, content_type):
             response = self.pack_response_buffer(content_type, response_buffer)
             curl.close()
             return response
-        logging.error("Unexpected content type: %s, expected %s", \
-            response_content_type, content_type)
+        logging.error("Unexpected content type: %s, expected %s",
+                      response_content_type, content_type)
         sys.exit(1)

-
     def pack_response_buffer(self, content_type, response_buffer):
         """Validate and return the response based on content type"""
         if content_type == 'text/html':
@@ -276,7 +269,6 @@ def pack_response_buffer(self, content_type, response_buffer):
         logging.error("Unsupported content type: %s", content_type)
         return None

-
     def extract_content_type(self, headers):
         """Read the header lines to look for content-type"""

@@ -313,11 +305,12 @@ def progress_callback(self, total_to_download, downloaded, total_to_upload, uplo
         if total_to_download > 0:
             if self.progress_bar is None:
                 # Initialize the progress bar if it's not set
-                self.progress_bar= tqdm(total=total_to_download,
-                                        unit='B',
-                                        unit_scale=True,
-                                        unit_divisor=1024,
-                                        desc="Downloading")
+                self.progress_bar = tqdm(total=total_to_download,
+                                         unit='B',
+                                         unit_scale=True,
+                                         unit_divisor=1024,
+                                         desc="Downloading")
+
             self.progress_bar.update(downloaded - self.progress_bar.n)  # Update the progress bar
             self.progress_bar.total = total_to_download
         return 0  # Returning 0 means to continue
@@ -389,11 +382,12 @@ def download_file(self, url):
         curl.setopt(pycurl.WRITEDATA, self.buffer)

         # Enable progress meter
-        self.progress_bar= tqdm(total=total_size,
-                                unit='B',
-                                unit_scale=True,
-                                unit_divisor=1024,
-                                desc="Downloading")
+        self.progress_bar = tqdm(total=total_size,
+                                 unit='B',
+                                 unit_scale=True,
+                                 unit_divisor=1024,
+                                 desc="Downloading")
+
         curl.setopt(pycurl.NOPROGRESS, False)
         curl.setopt(pycurl.XFERINFOFUNCTION, self.progress_callback)

@@ -435,6 +429,7 @@ def download_file(self, url):
 REQUEST_HANDLER = RequestHandler()
 DOWNLOADER = Downloader()

+
 def get(url, content_type):
     """
     Get server response from TRLE or Trcustom hosts
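The re-wrapped tqdm calls above belong to a pycurl progress pattern: libcurl's transfer-info callback feeds byte counts into a lazily created tqdm bar and returns 0 to keep the transfer going. A minimal standalone sketch of that pattern, assuming pycurl and tqdm are installed; the URL is a placeholder:

```python
from io import BytesIO

import pycurl
from tqdm import tqdm

progress_bar = None


def progress_callback(total_to_download, downloaded, total_to_upload, uploaded):
    """Create the bar on first call, then advance it to the bytes received so far."""
    global progress_bar
    if total_to_download > 0:
        if progress_bar is None:
            progress_bar = tqdm(total=total_to_download,
                                unit='B',
                                unit_scale=True,
                                unit_divisor=1024,
                                desc="Downloading")
        progress_bar.update(downloaded - progress_bar.n)
        progress_bar.total = total_to_download
    return 0  # returning 0 tells libcurl to continue


buffer = BytesIO()
curl = pycurl.Curl()
curl.setopt(pycurl.URL, "https://example.com/file.zip")  # placeholder URL
curl.setopt(pycurl.WRITEDATA, buffer)
curl.setopt(pycurl.NOPROGRESS, False)  # enable the progress meter
curl.setopt(pycurl.XFERINFOFUNCTION, progress_callback)
curl.perform()
curl.close()
if progress_bar is not None:
    progress_bar.close()
```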

database/scrape.py

Lines changed: 7 additions & 6 deletions
@@ -23,16 +23,17 @@
 # Some basic URL converters
 ###############################################################################

+
 def trle_search_parser(url):
     """
     Prepares a URL for level title searches on TRLE by encoding special characters.
-
-    Note: This function should generally be avoided in favor of searching the local
+
+    Note: This function should generally be avoided in favor of searching the local
     database, as it may not fully cover all cases or include recent updates.
-
+
     Args:
         url (str): The original URL string to be formatted for TRLE search compatibility.
-
+
     Returns:
         str: The formatted URL with specific characters
             replaced to match TRLE encoding requirements.
@@ -46,10 +47,10 @@ def trle_search_parser(url):
 def url_postfix(url):
     """
     Extracts the file extension from a URL without the leading dot.
-
+
     Args:
         url (str): The URL to extract the file extension from.
-
+
     Returns:
         str: The file extension without the leading dot,
             or an empty string if no extension is present.
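The url_postfix docstring above describes pulling a file extension out of a URL. One way such a helper could look, sketched with the standard library; this is an illustration, not the function's actual body, and the example URLs are placeholders:

```python
from pathlib import PurePosixPath
from urllib.parse import urlparse


def url_postfix(url):
    """Return the file extension of a URL's path without the leading dot."""
    suffix = PurePosixPath(urlparse(url).path).suffix
    return suffix.lstrip('.')


print(url_postfix("https://www.trle.net/levels/level.zip"))  # -> "zip"
print(url_postfix("https://www.trle.net/levels/"))           # -> ""
```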

src/Network.cpp

Lines changed: 32 additions & 13 deletions
@@ -15,11 +15,11 @@
 */

 #include "Network.h"
+#include <curl/curl.h>
 #include <iostream>
 #include <string>
 #include <boost/asio.hpp>
 #include <boost/asio/ssl.hpp>
-#include <curl/curl.h>

 namespace ssl = boost::asio::ssl;
 using tcp = boost::asio::ip::tcp;
@@ -44,7 +44,7 @@ std::string get_ssl_certificate(const std::string& host) {
     BIO* bio = BIO_new(BIO_s_mem());
     PEM_write_bio_X509(bio, cert);
     char* cert_str = nullptr;
-    long cert_len = BIO_get_mem_data(bio, &cert_str);
+    qint64 cert_len = BIO_get_mem_data(bio, &cert_str);
     std::string cert_buffer(cert_str, cert_len);

     BIO_free(bio);
@@ -93,7 +93,7 @@ int Downloader::progress_callback(

     // Emit signal only if progress has increased by at least 1%
     static int lastEmittedProgress = 0;
-    if ((int)progress == 0) lastEmittedProgress = 0;
+    if (static_cast<int>(progress) == 0) lastEmittedProgress = 0;
     if (static_cast<int>(progress) > lastEmittedProgress)
     {
         static Downloader& instance = Downloader::getInstance();
@@ -122,8 +122,16 @@ int Downloader::getStatus()

 void Downloader::saveToFile(const QByteArray& data, const QString& filePath)
 {
+    QFileInfo fileInfo(filePath);
+
+    if (fileInfo.exists() && !fileInfo.isFile()) {
+        qDebug() << "Error: The zip path is not a regular file." << filePath;
+        return;
+    }
+
     QFile file(filePath);
-    if (file.open(QIODevice::WriteOnly))
+
+    if (file.open(QIODevice::WriteOnly)) // flawfinder: ignore
     {
         file.write(data);
         file.close();
@@ -144,9 +152,17 @@ void Downloader::run()
     QByteArray byteArray = urlString.toUtf8();
     const char* url_cstring = byteArray.constData();

-    QString filePath = levelDir_m.absolutePath() + QDir::separator() + file_m;
+    const QString filePath = levelDir_m.absolutePath() +
+        QDir::separator() + file_m;

-    FILE* file = fopen(filePath.toUtf8().constData(), "wb");
+    QFileInfo fileInfo(filePath);
+
+    if (fileInfo.exists() && !fileInfo.isFile()) {
+        qDebug() << "Error: The zip path is not a regular file." << filePath;
+        return;
+    }
+
+    FILE* file = fopen(filePath.toUtf8(), "wb"); // flawfinder: ignore
     if (!file)
     {
         qDebug() << "Error opening file for writing:" << filePath;
@@ -162,22 +178,25 @@ void Downloader::run()

     // Set up the in-memory blob for curl to use
     curl_blob blob;
-    blob.data = const_cast<void*>(static_cast<const void*>(cert_buffer.data()));
-    //blob.data = cert_buffer.data();
+    blob.data = cert_buffer.data();
     blob.len = cert_buffer.size();
     blob.flags = CURL_BLOB_COPY;

     curl_easy_setopt(curl, CURLOPT_URL, url_cstring);
     curl_easy_setopt(curl, CURLOPT_CAINFO_BLOB, &blob);
-    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, Downloader::write_callback);
+    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION,
+                     Downloader::write_callback);
+
     curl_easy_setopt(curl, CURLOPT_WRITEDATA, &writeData);

     // Follow redirects
     curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);

     // Enable progress meter
     curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0L);
-    curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION, Downloader::progress_callback);
+    curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION,
+                     Downloader::progress_callback);
+
     curl_easy_setopt(curl, CURLOPT_XFERINFODATA, nullptr);

     // Perform the download
@@ -186,16 +205,16 @@
     if (res != CURLE_OK)
     {
         status_m = 1;
-        qDebug() << "curl_easy_perform() failed:" << curl_easy_strerror(res);
+        qDebug() << "CURL failed:" << curl_easy_strerror(res);
         // we need to catch any of those that seem inportant here to the GUI
         // and reset GUI state
         // https://curl.se/libcurl/c/libcurl-errors.html
-        if(res == 6 || res == 7 || res == 28 || res == 35)
+        if (res == 6 || res == 7 || res == 28 || res == 35)
         {
             emit this->networkWorkErrorSignal(1);
             QCoreApplication::processEvents();
         }
-        else if(res == CURLE_PEER_FAILED_VERIFICATION)
+        else if (res == CURLE_PEER_FAILED_VERIFICATION)
         {
             emit this->networkWorkErrorSignal(2);
             QCoreApplication::processEvents();
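The QFileInfo checks added above refuse to write when the target path exists but is not a regular file. The same guard, sketched in Python purely for illustration; the function name and the example path are placeholders:

```python
from pathlib import Path


def save_to_file(data, file_path):
    """Refuse to write if the target exists and is not a regular file."""
    target = Path(file_path)
    if target.exists() and not target.is_file():
        print(f"Error: The zip path is not a regular file: {file_path}")
        return
    with open(file_path, "wb") as zip_file:
        zip_file.write(data)


save_to_file(b"example bytes", "/tmp/level.zip")  # placeholder path
```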
