
Commit fdb164f

Merge pull request #103 from carlopi/make_format_cpp
[chore] make format on cpp files
2 parents: b45426e + 9386614

6 files changed: +56 -59 lines
extension/httpfs/create_secret_functions.cpp

Lines changed: 3 additions & 3 deletions
@@ -111,7 +111,7 @@ unique_ptr<BaseSecret> CreateS3SecretFunctions::CreateSecretFunctionInternal(Cli
     } else if (lower_name == "requester_pays") {
         if (named_param.second.type() != LogicalType::BOOLEAN) {
             throw InvalidInputException("Invalid type past to secret option: '%s', found '%s', expected: 'BOOLEAN'",
-                                        lower_name, named_param.second.type().ToString());
+                                        lower_name, named_param.second.type().ToString());
         }
         secret->secret_map["requester_pays"] = Value::BOOLEAN(named_param.second.GetValue<bool>());
     } else if (lower_name == "bearer_token" && input.type == "gcs") {

@@ -195,7 +195,7 @@ void CreateS3SecretFunctions::SetBaseNamedParams(CreateSecretFunction &function,
     function.named_parameters["use_ssl"] = LogicalType::BOOLEAN;
     function.named_parameters["kms_key_id"] = LogicalType::VARCHAR;
     function.named_parameters["url_compatibility_mode"] = LogicalType::BOOLEAN;
-    function.named_parameters["requester_pays"] = LogicalType::BOOLEAN;
+    function.named_parameters["requester_pays"] = LogicalType::BOOLEAN;

     // Whether a secret refresh attempt should be made when the secret appears to be incorrect
     function.named_parameters["refresh"] = LogicalType::VARCHAR;

@@ -214,7 +214,7 @@ void CreateS3SecretFunctions::SetBaseNamedParams(CreateSecretFunction &function,
     if (type == "r2") {
         function.named_parameters["account_id"] = LogicalType::VARCHAR;
     }
-
+
     if (type == "gcs") {
         function.named_parameters["bearer_token"] = LogicalType::VARCHAR;
     }
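
For context on the requester_pays option registered above: as the s3fs.cpp hunks further down show, the flag simply causes signed S3 requests to carry an x-amz-request-payer header. A minimal standalone sketch of that mapping, using plain std types rather than DuckDB's HTTPHeaders (the names below are illustrative, not the extension's API):

#include <iostream>
#include <map>
#include <string>

// Hypothetical stand-in for the extension's header map type.
using HeaderMap = std::map<std::string, std::string>;

// If the secret option enables requester-pays, S3 expects this header on
// every signed request so that the requester, not the bucket owner, is billed.
static void ApplyRequesterPays(bool requester_pays, HeaderMap &headers) {
    if (requester_pays) {
        headers["x-amz-request-payer"] = "requester";
    }
}

int main() {
    HeaderMap headers;
    ApplyRequesterPays(true, headers);
    for (const auto &kv : headers) {
        std::cout << kv.first << ": " << kv.second << "\n";
    }
    return 0;
}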

extension/httpfs/httpfs.cpp

Lines changed: 2 additions & 1 deletion
@@ -275,7 +275,8 @@ void TimestampToTimeT(timestamp_t timestamp, time_t &result) {
 HTTPFileHandle::HTTPFileHandle(FileSystem &fs, const OpenFileInfo &file, FileOpenFlags flags,
                                unique_ptr<HTTPParams> params_p)
     : FileHandle(fs, file.path, flags), params(std::move(params_p)), http_params(params->Cast<HTTPFSParams>()),
-      flags(flags), length(0), force_full_download(false), buffer_available(0), buffer_idx(0), file_offset(0), buffer_start(0), buffer_end(0) {
+      flags(flags), length(0), force_full_download(false), buffer_available(0), buffer_idx(0), file_offset(0),
+      buffer_start(0), buffer_end(0) {
     // check if the handle has extended properties that can be set directly in the handle
     // if we have these properties we don't need to do a head request to obtain them later
     if (file.extended_info) {

extension/httpfs/httpfs_client_wasm.cpp

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@ unique_ptr<HTTPClient> HTTPFSUtil::InitializeClient(HTTPParams &http_params, con

 unordered_map<string, string> HTTPFSUtil::ParseGetParameters(const string &text) {
     unordered_map<string, string> result;
-    //TODO: HTTPFSUtil::ParseGetParameters is currently not implemented
+    // TODO: HTTPFSUtil::ParseGetParameters is currently not implemented
     return result;
 }

extension/httpfs/httpfs_extension.cpp

Lines changed: 14 additions & 14 deletions
@@ -13,23 +13,23 @@
 namespace duckdb {

 static void SetHttpfsClientImplementation(DBConfig &config, const string &value) {
-    if (config.http_util && config.http_util->GetName() == "WasmHTTPUtils") {
-        if (value == "wasm" || value == "default") {
-            // Already handled, do not override
-            return;
-        }
-        throw InvalidInputException("Unsupported option for httpfs_client_implementation, only `wasm` and "
-                                    "`default` are currently supported for duckdb-wasm");
-    }
-    if (value == "httplib" || value == "default") {
-        if (!config.http_util || config.http_util->GetName() != "HTTPFSUtil") {
-            config.http_util = make_shared_ptr<HTTPFSUtil>();
-        }
+    if (config.http_util && config.http_util->GetName() == "WasmHTTPUtils") {
+        if (value == "wasm" || value == "default") {
+            // Already handled, do not override
             return;
         }
-        throw InvalidInputException("Unsupported option for httpfs_client_implementation, only `curl`, `httplib` and "
-                                    "`default` are currently supported");
+        throw InvalidInputException("Unsupported option for httpfs_client_implementation, only `wasm` and "
+                                    "`default` are currently supported for duckdb-wasm");
+    }
+    if (value == "httplib" || value == "default") {
+        if (!config.http_util || config.http_util->GetName() != "HTTPFSUtil") {
+            config.http_util = make_shared_ptr<HTTPFSUtil>();
+        }
+        return;
     }
+    throw InvalidInputException("Unsupported option for httpfs_client_implementation, only `curl`, `httplib` and "
+                                "`default` are currently supported");
+}

 static void LoadInternal(DatabaseInstance &instance) {
     auto &fs = instance.GetFileSystem();
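
The hunk above only re-indents SetHttpfsClientImplementation; the dispatch itself is unchanged: a duckdb-wasm build accepts only `wasm` or `default`, a native build accepts `httplib` or `default` (installing HTTPFSUtil when needed), and anything else raises InvalidInputException. Below is a standalone sketch of the same selection logic using plain standard-library types; the class names and the handling of the `curl` backend are assumptions for illustration, not DuckDB's API.

#include <memory>
#include <stdexcept>
#include <string>

// Minimal stand-ins for the extension's HTTP utility classes; names are illustrative only.
struct HTTPUtil {
    virtual ~HTTPUtil() = default;
    virtual std::string GetName() const = 0;
};
struct HttplibUtil : HTTPUtil {
    std::string GetName() const override {
        return "HTTPFSUtil";
    }
};

// Same shape as SetHttpfsClientImplementation in the hunk above: on wasm builds only
// "wasm"/"default" are accepted; otherwise "httplib"/"default" install the httplib-backed
// utility, and unknown values raise an error. (The "curl" backend mentioned in the error
// text is presumably provided by a separate client implementation file, so it is not
// handled in this sketch.)
static void SetClientImplementation(std::shared_ptr<HTTPUtil> &http_util, const std::string &value) {
    if (http_util && http_util->GetName() == "WasmHTTPUtils") {
        if (value == "wasm" || value == "default") {
            return; // already handled, do not override
        }
        throw std::invalid_argument("only `wasm` and `default` are supported for duckdb-wasm");
    }
    if (value == "httplib" || value == "default") {
        if (!http_util || http_util->GetName() != "HTTPFSUtil") {
            http_util = std::make_shared<HttplibUtil>();
        }
        return;
    }
    throw std::invalid_argument("only `curl`, `httplib` and `default` are supported");
}

int main() {
    std::shared_ptr<HTTPUtil> http_util;
    SetClientImplementation(http_util, "default");
    return (http_util && http_util->GetName() == "HTTPFSUtil") ? 0 : 1;
}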

extension/httpfs/include/s3fs.hpp

Lines changed: 2 additions & 3 deletions
@@ -30,8 +30,8 @@ struct S3AuthParams {
     string url_style;
     bool use_ssl = true;
     bool s3_url_compatibility_mode = false;
-    bool requester_pays = false;
-    string oauth2_bearer_token; // OAuth2 bearer token for GCS
+    bool requester_pays = false;
+    string oauth2_bearer_token; // OAuth2 bearer token for GCS

     static S3AuthParams ReadFrom(optional_ptr<FileOpener> opener, FileOpenerInfo &info);
 };

@@ -47,7 +47,6 @@ struct AWSEnvironmentCredentialsProvider {
     static constexpr const char *DUCKDB_KMS_KEY_ID_ENV_VAR = "DUCKDB_S3_KMS_KEY_ID";
     static constexpr const char *DUCKDB_REQUESTER_PAYS_ENV_VAR = "DUCKDB_S3_REQUESTER_PAYS";

-
     explicit AWSEnvironmentCredentialsProvider(DBConfig &config) : config(config) {};

     DBConfig &config;

extension/httpfs/s3fs.cpp

Lines changed: 34 additions & 37 deletions
@@ -70,7 +70,7 @@ static HTTPHeaders create_s3_header(string url, string query, string host, strin
         res["x-amz-request-payer"] = "requester";
     }

-    string signed_headers = "";
+    string signed_headers = "";
     hash_bytes canonical_request_hash;
     hash_str canonical_request_hash_str;
     if (content_type.length() > 0) {

@@ -87,7 +87,7 @@ static HTTPHeaders create_s3_header(string url, string query, string host, strin
     if (use_requester_pays) {
         signed_headers += ";x-amz-request-payer";
     }
-    auto canonical_request = method + "\n" + S3FileSystem::UrlEncode(url) + "\n" + query;
+    auto canonical_request = method + "\n" + S3FileSystem::UrlEncode(url) + "\n" + query;
     if (content_type.length() > 0) {
         canonical_request += "\ncontent-type:" + content_type;
     }
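
The two hunks above sit inside create_s3_header, which assembles what appears to be an AWS Signature Version 4 canonical request. A simplified, self-contained sketch of that string assembly follows; the real code also hashes and signs the result, and the helper and parameter names here are assumptions, not the extension's signatures.

#include <iostream>
#include <string>

// Placeholder for S3FileSystem::UrlEncode; a real implementation
// percent-encodes everything except unreserved characters.
static std::string UrlEncode(const std::string &input) {
    return input; // illustrative only
}

// Mirrors the concatenation visible in the diff: method, encoded path, query,
// optional content-type header, host, signing date, payload hash, and the
// optional x-amz-request-payer header, each on its own line, followed by the
// signed-header list and the payload hash.
static std::string BuildCanonicalRequest(const std::string &method, const std::string &url,
                                         const std::string &query, const std::string &content_type,
                                         const std::string &host, const std::string &datetime,
                                         const std::string &payload_hash, bool use_requester_pays) {
    std::string signed_headers;
    if (!content_type.empty()) {
        signed_headers += "content-type;";
    }
    signed_headers += "host;x-amz-content-sha256;x-amz-date";
    if (use_requester_pays) {
        signed_headers += ";x-amz-request-payer";
    }

    std::string canonical_request = method + "\n" + UrlEncode(url) + "\n" + query;
    if (!content_type.empty()) {
        canonical_request += "\ncontent-type:" + content_type;
    }
    canonical_request += "\nhost:" + host;
    canonical_request += "\nx-amz-content-sha256:" + payload_hash;
    canonical_request += "\nx-amz-date:" + datetime;
    if (use_requester_pays) {
        canonical_request += "\nx-amz-request-payer:requester";
    }
    canonical_request += "\n\n" + signed_headers + "\n" + payload_hash;
    return canonical_request;
}

int main() {
    std::cout << BuildCanonicalRequest("GET", "/bucket/key", "", "", "s3.amazonaws.com",
                                       "20240101T000000Z", "UNSIGNED-PAYLOAD", true)
              << "\n";
    return 0;
}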
@@ -136,8 +136,7 @@ string S3FileSystem::UrlEncode(const string &input, bool encode_slash) {
 }

 static bool IsGCSRequest(const string &url) {
-    return StringUtil::StartsWith(url, "gcs://") ||
-           StringUtil::StartsWith(url, "gs://");
+    return StringUtil::StartsWith(url, "gcs://") || StringUtil::StartsWith(url, "gs://");
 }

 void AWSEnvironmentCredentialsProvider::SetExtensionOptionValue(string key, const char *env_var_name) {

@@ -177,7 +176,7 @@ S3AuthParams AWSEnvironmentCredentialsProvider::CreateParams() {
     params.endpoint = DUCKDB_ENDPOINT_ENV_VAR;
     params.kms_key_id = DUCKDB_KMS_KEY_ID_ENV_VAR;
     params.use_ssl = DUCKDB_USE_SSL_ENV_VAR;
-    params.requester_pays = DUCKDB_REQUESTER_PAYS_ENV_VAR;
+    params.requester_pays = DUCKDB_REQUESTER_PAYS_ENV_VAR;

     return params;
 }

@@ -203,8 +202,7 @@ S3AuthParams S3AuthParams::ReadFrom(optional_ptr<FileOpener> opener, FileOpenerI
     secret_reader.TryGetSecretKeyOrSetting("kms_key_id", "s3_kms_key_id", result.kms_key_id);
     secret_reader.TryGetSecretKeyOrSetting("s3_url_compatibility_mode", "s3_url_compatibility_mode",
                                            result.s3_url_compatibility_mode);
-    secret_reader.TryGetSecretKeyOrSetting("requester_pays", "s3_requester_pays",
-                                           result.requester_pays);
+    secret_reader.TryGetSecretKeyOrSetting("requester_pays", "s3_requester_pays", result.requester_pays);

     // Endpoint and url style are slightly more complex and require special handling for gcs and r2
     auto endpoint_result = secret_reader.TryGetSecretKeyOrSetting("endpoint", "s3_endpoint", result.endpoint);

@@ -223,9 +221,9 @@ S3AuthParams S3AuthParams::ReadFrom(optional_ptr<FileOpener> opener, FileOpenerI
     }

     if (!result.region.empty() && (result.endpoint.empty() || result.endpoint == "s3.amazonaws.com")) {
-        result.endpoint = StringUtil::Format("s3.%s.amazonaws.com", result.region);
+        result.endpoint = StringUtil::Format("s3.%s.amazonaws.com", result.region);
     } else if (result.endpoint.empty()) {
-        result.endpoint = "s3.amazonaws.com";
+        result.endpoint = "s3.amazonaws.com";
     }

     return result;

@@ -592,7 +590,7 @@ void S3FileSystem::ReadQueryParams(const string &url_query_param, S3AuthParams &
     if (!query_params.empty()) {
         throw IOException("Invalid query parameters found. Supported parameters are:\n's3_region', 's3_access_key_id', "
                           "'s3_secret_access_key', 's3_session_token',\n's3_endpoint', 's3_url_style', 's3_use_ssl', "
-                          "'s3_requester_pays'");
+                          "'s3_requester_pays'");
     }
 }

@@ -703,7 +701,7 @@ unique_ptr<HTTPResponse> S3FileSystem::PostRequest(FileHandle &handle, string ur
     auto auth_params = handle.Cast<S3FileHandle>().auth_params;
     auto parsed_s3_url = S3UrlParse(url, auth_params);
     string http_url = parsed_s3_url.GetHTTPUrl(auth_params, http_params);
-
+
     HTTPHeaders headers;
     if (IsGCSRequest(url) && !auth_params.oauth2_bearer_token.empty()) {
         // Use bearer token for GCS

@@ -714,7 +712,7 @@ unique_ptr<HTTPResponse> S3FileSystem::PostRequest(FileHandle &handle, string ur
         // Use existing S3 authentication
         auto payload_hash = GetPayloadHash(buffer_in, buffer_in_len);
         headers = create_s3_header(parsed_s3_url.path, http_params, parsed_s3_url.host, "s3", "POST", auth_params, "",
-                                    "", payload_hash, "application/octet-stream");
+                                    "", payload_hash, "application/octet-stream");
     }

     return HTTPFileSystem::PostRequest(handle, http_url, headers, result, buffer_in, buffer_in_len);

@@ -726,7 +724,7 @@ unique_ptr<HTTPResponse> S3FileSystem::PutRequest(FileHandle &handle, string url
     auto parsed_s3_url = S3UrlParse(url, auth_params);
     string http_url = parsed_s3_url.GetHTTPUrl(auth_params, http_params);
     auto content_type = "application/octet-stream";
-
+
     HTTPHeaders headers;
     if (IsGCSRequest(url) && !auth_params.oauth2_bearer_token.empty()) {
         // Use bearer token for GCS

@@ -737,47 +735,47 @@ unique_ptr<HTTPResponse> S3FileSystem::PutRequest(FileHandle &handle, string url
         // Use existing S3 authentication
         auto payload_hash = GetPayloadHash(buffer_in, buffer_in_len);
         headers = create_s3_header(parsed_s3_url.path, http_params, parsed_s3_url.host, "s3", "PUT", auth_params, "",
-                                    "", payload_hash, content_type);
+                                    "", payload_hash, content_type);
     }
-
+
     return HTTPFileSystem::PutRequest(handle, http_url, headers, buffer_in, buffer_in_len);
 }

 unique_ptr<HTTPResponse> S3FileSystem::HeadRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
     auto auth_params = handle.Cast<S3FileHandle>().auth_params;
     auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
     string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-
+
     HTTPHeaders headers;
     if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
         // Use bearer token for GCS
         headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
         headers["Host"] = parsed_s3_url.host;
     } else {
         // Use existing S3 authentication
-        headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
-                                   "s3", "HEAD", auth_params, "", "", "", "");
+        headers =
+            create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "HEAD", auth_params, "", "", "", "");
     }
-
+
     return HTTPFileSystem::HeadRequest(handle, http_url, headers);
 }

 unique_ptr<HTTPResponse> S3FileSystem::GetRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
     auto auth_params = handle.Cast<S3FileHandle>().auth_params;
     auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
     string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-
+
     HTTPHeaders headers;
     if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
         // Use bearer token for GCS
         headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
         headers["Host"] = parsed_s3_url.host;
     } else {
         // Use existing S3 authentication
-        headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
-                                   "s3", "GET", auth_params, "", "", "", "");
+        headers =
+            create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "GET", auth_params, "", "", "", "");
     }
-
+
     return HTTPFileSystem::GetRequest(handle, http_url, headers);
 }
@@ -786,37 +784,37 @@ unique_ptr<HTTPResponse> S3FileSystem::GetRangeRequest(FileHandle &handle, strin
     auto auth_params = handle.Cast<S3FileHandle>().auth_params;
     auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
     string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-
+
     HTTPHeaders headers;
     if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
         // Use bearer token for GCS
         headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
         headers["Host"] = parsed_s3_url.host;
     } else {
         // Use existing S3 authentication
-        headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
-                                   "s3", "GET", auth_params, "", "", "", "");
+        headers =
+            create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "GET", auth_params, "", "", "", "");
     }
-
+
     return HTTPFileSystem::GetRangeRequest(handle, http_url, headers, file_offset, buffer_out, buffer_out_len);
 }

 unique_ptr<HTTPResponse> S3FileSystem::DeleteRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
     auto auth_params = handle.Cast<S3FileHandle>().auth_params;
     auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
     string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-
+
     HTTPHeaders headers;
     if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
         // Use bearer token for GCS
         headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
         headers["Host"] = parsed_s3_url.host;
     } else {
         // Use existing S3 authentication
-        headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
-                                   "s3", "DELETE", auth_params, "", "", "", "");
+        headers =
+            create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "DELETE", auth_params, "", "", "", "");
     }
-
+
     return HTTPFileSystem::DeleteRequest(handle, http_url, headers);
 }

@@ -1134,8 +1132,7 @@ string S3FileSystem::GetS3AuthError(S3AuthParams &s3_auth_params) {

 string S3FileSystem::GetGCSAuthError(S3AuthParams &s3_auth_params) {
     string extra_text = "\n\nAuthentication Failure - GCS authentication failed.";
-    if (s3_auth_params.oauth2_bearer_token.empty() &&
-        s3_auth_params.secret_access_key.empty() &&
+    if (s3_auth_params.oauth2_bearer_token.empty() && s3_auth_params.secret_access_key.empty() &&
         s3_auth_params.access_key_id.empty()) {
         extra_text += "\n* No credentials provided.";
         extra_text += "\n* For OAuth2: CREATE SECRET (TYPE GCS, bearer_token 'your-token')";

@@ -1165,15 +1162,15 @@ HTTPException S3FileSystem::GetS3Error(S3AuthParams &s3_auth_params, const HTTPR

 HTTPException S3FileSystem::GetHTTPError(FileHandle &handle, const HTTPResponse &response, const string &url) {
     auto &s3_handle = handle.Cast<S3FileHandle>();
-
+
     // Use GCS-specific error for GCS URLs
     if (IsGCSRequest(url) && response.status == HTTPStatusCode::Forbidden_403) {
         string extra_text = GetGCSAuthError(s3_handle.auth_params);
         auto status_message = HTTPFSUtil::GetStatusMessage(response.status);
-        throw HTTPException(response, "HTTP error on '%s' (HTTP %d %s)%s", url,
-                            response.status, status_message, extra_text);
+        throw HTTPException(response, "HTTP error on '%s' (HTTP %d %s)%s", url, response.status, status_message,
+                            extra_text);
     }
-
+
     return GetS3Error(s3_handle.auth_params, response, url);
 }
 string AWSListObjectV2::Request(string &path, HTTPParams &http_params, S3AuthParams &s3_auth_params,
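
The HEAD, GET, GET-range and DELETE hunks above all rewrap the same branch: URLs with a gcs:// or gs:// scheme and a configured OAuth2 bearer token get a plain Authorization header, while every other request is signed through create_s3_header. A compact standalone sketch of that branch, with simplified types and hypothetical helper signatures rather than the extension's actual API:

#include <iostream>
#include <map>
#include <string>

using HeaderMap = std::map<std::string, std::string>;

// Matches IsGCSRequest in the diff: only the URL scheme is inspected.
static bool IsGCSRequest(const std::string &url) {
    return url.rfind("gcs://", 0) == 0 || url.rfind("gs://", 0) == 0;
}

// Hypothetical stand-in for create_s3_header; a real implementation would
// build and sign the SigV4 canonical request from the method, path, and the
// other request properties.
static HeaderMap CreateS3Header(const std::string &path, const std::string &host, const std::string &method) {
    (void)path;
    (void)method; // unused in this sketch
    HeaderMap headers;
    headers["Host"] = host;
    headers["X-Amz-Date"] = "20240101T000000Z"; // placeholder value
    return headers;
}

// The branch each request method in the diff performs before dispatching.
static HeaderMap BuildRequestHeaders(const std::string &url, const std::string &path, const std::string &host,
                                     const std::string &method, const std::string &oauth2_bearer_token) {
    HeaderMap headers;
    if (IsGCSRequest(url) && !oauth2_bearer_token.empty()) {
        // Use bearer token for GCS
        headers["Authorization"] = "Bearer " + oauth2_bearer_token;
        headers["Host"] = host;
    } else {
        // Use existing S3 authentication
        headers = CreateS3Header(path, host, method);
    }
    return headers;
}

int main() {
    auto headers = BuildRequestHeaders("gs://bucket/object", "/bucket/object", "storage.googleapis.com", "GET",
                                       "example-token");
    for (const auto &kv : headers) {
        std::cout << kv.first << ": " << kv.second << "\n";
    }
    return 0;
}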
