Skip to content

Commit 9386614

Browse files
committed
[chore] make format
1 parent e9bb991 commit 9386614

File tree

6 files changed

+56
-59
lines changed

6 files changed

+56
-59
lines changed

extension/httpfs/create_secret_functions.cpp

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ unique_ptr<BaseSecret> CreateS3SecretFunctions::CreateSecretFunctionInternal(Cli
111111
} else if (lower_name == "requester_pays") {
112112
if (named_param.second.type() != LogicalType::BOOLEAN) {
113113
throw InvalidInputException("Invalid type past to secret option: '%s', found '%s', expected: 'BOOLEAN'",
114-
lower_name, named_param.second.type().ToString());
114+
lower_name, named_param.second.type().ToString());
115115
}
116116
secret->secret_map["requester_pays"] = Value::BOOLEAN(named_param.second.GetValue<bool>());
117117
} else if (lower_name == "bearer_token" && input.type == "gcs") {
@@ -195,7 +195,7 @@ void CreateS3SecretFunctions::SetBaseNamedParams(CreateSecretFunction &function,
195195
function.named_parameters["use_ssl"] = LogicalType::BOOLEAN;
196196
function.named_parameters["kms_key_id"] = LogicalType::VARCHAR;
197197
function.named_parameters["url_compatibility_mode"] = LogicalType::BOOLEAN;
198-
function.named_parameters["requester_pays"] = LogicalType::BOOLEAN;
198+
function.named_parameters["requester_pays"] = LogicalType::BOOLEAN;
199199

200200
// Whether a secret refresh attempt should be made when the secret appears to be incorrect
201201
function.named_parameters["refresh"] = LogicalType::VARCHAR;
@@ -214,7 +214,7 @@ void CreateS3SecretFunctions::SetBaseNamedParams(CreateSecretFunction &function,
214214
if (type == "r2") {
215215
function.named_parameters["account_id"] = LogicalType::VARCHAR;
216216
}
217-
217+
218218
if (type == "gcs") {
219219
function.named_parameters["bearer_token"] = LogicalType::VARCHAR;
220220
}

extension/httpfs/httpfs.cpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -275,7 +275,8 @@ void TimestampToTimeT(timestamp_t timestamp, time_t &result) {
275275
HTTPFileHandle::HTTPFileHandle(FileSystem &fs, const OpenFileInfo &file, FileOpenFlags flags,
276276
unique_ptr<HTTPParams> params_p)
277277
: FileHandle(fs, file.path, flags), params(std::move(params_p)), http_params(params->Cast<HTTPFSParams>()),
278-
flags(flags), length(0), force_full_download(false), buffer_available(0), buffer_idx(0), file_offset(0), buffer_start(0), buffer_end(0) {
278+
flags(flags), length(0), force_full_download(false), buffer_available(0), buffer_idx(0), file_offset(0),
279+
buffer_start(0), buffer_end(0) {
279280
// check if the handle has extended properties that can be set directly in the handle
280281
// if we have these properties we don't need to do a head request to obtain them later
281282
if (file.extended_info) {

extension/httpfs/httpfs_client_wasm.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ unique_ptr<HTTPClient> HTTPFSUtil::InitializeClient(HTTPParams &http_params, con
99

1010
unordered_map<string, string> HTTPFSUtil::ParseGetParameters(const string &text) {
1111
unordered_map<string, string> result;
12-
//TODO: HTTPFSUtil::ParseGetParameters is currently not implemented
12+
// TODO: HTTPFSUtil::ParseGetParameters is currently not implemented
1313
return result;
1414
}
1515

extension/httpfs/httpfs_extension.cpp

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -13,23 +13,23 @@
1313
namespace duckdb {
1414

1515
static void SetHttpfsClientImplementation(DBConfig &config, const string &value) {
16-
if (config.http_util && config.http_util->GetName() == "WasmHTTPUtils") {
17-
if (value == "wasm" || value == "default") {
18-
// Already handled, do not override
19-
return;
20-
}
21-
throw InvalidInputException("Unsupported option for httpfs_client_implementation, only `wasm` and "
22-
"`default` are currently supported for duckdb-wasm");
23-
}
24-
if (value == "httplib" || value == "default") {
25-
if (!config.http_util || config.http_util->GetName() != "HTTPFSUtil") {
26-
config.http_util = make_shared_ptr<HTTPFSUtil>();
27-
}
16+
if (config.http_util && config.http_util->GetName() == "WasmHTTPUtils") {
17+
if (value == "wasm" || value == "default") {
18+
// Already handled, do not override
2819
return;
2920
}
30-
throw InvalidInputException("Unsupported option for httpfs_client_implementation, only `curl`, `httplib` and "
31-
"`default` are currently supported");
21+
throw InvalidInputException("Unsupported option for httpfs_client_implementation, only `wasm` and "
22+
"`default` are currently supported for duckdb-wasm");
23+
}
24+
if (value == "httplib" || value == "default") {
25+
if (!config.http_util || config.http_util->GetName() != "HTTPFSUtil") {
26+
config.http_util = make_shared_ptr<HTTPFSUtil>();
27+
}
28+
return;
3229
}
30+
throw InvalidInputException("Unsupported option for httpfs_client_implementation, only `curl`, `httplib` and "
31+
"`default` are currently supported");
32+
}
3333

3434
static void LoadInternal(DatabaseInstance &instance) {
3535
auto &fs = instance.GetFileSystem();

extension/httpfs/include/s3fs.hpp

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,8 +30,8 @@ struct S3AuthParams {
3030
string url_style;
3131
bool use_ssl = true;
3232
bool s3_url_compatibility_mode = false;
33-
bool requester_pays = false;
34-
string oauth2_bearer_token; // OAuth2 bearer token for GCS
33+
bool requester_pays = false;
34+
string oauth2_bearer_token; // OAuth2 bearer token for GCS
3535

3636
static S3AuthParams ReadFrom(optional_ptr<FileOpener> opener, FileOpenerInfo &info);
3737
};
@@ -47,7 +47,6 @@ struct AWSEnvironmentCredentialsProvider {
4747
static constexpr const char *DUCKDB_KMS_KEY_ID_ENV_VAR = "DUCKDB_S3_KMS_KEY_ID";
4848
static constexpr const char *DUCKDB_REQUESTER_PAYS_ENV_VAR = "DUCKDB_S3_REQUESTER_PAYS";
4949

50-
5150
explicit AWSEnvironmentCredentialsProvider(DBConfig &config) : config(config) {};
5251

5352
DBConfig &config;

extension/httpfs/s3fs.cpp

Lines changed: 34 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ static HTTPHeaders create_s3_header(string url, string query, string host, strin
6767
res["x-amz-request-payer"] = "requester";
6868
}
6969

70-
string signed_headers = "";
70+
string signed_headers = "";
7171
hash_bytes canonical_request_hash;
7272
hash_str canonical_request_hash_str;
7373
if (content_type.length() > 0) {
@@ -83,7 +83,7 @@ static HTTPHeaders create_s3_header(string url, string query, string host, strin
8383
if (use_requester_pays) {
8484
signed_headers += ";x-amz-request-payer";
8585
}
86-
auto canonical_request = method + "\n" + S3FileSystem::UrlEncode(url) + "\n" + query;
86+
auto canonical_request = method + "\n" + S3FileSystem::UrlEncode(url) + "\n" + query;
8787
if (content_type.length() > 0) {
8888
canonical_request += "\ncontent-type:" + content_type;
8989
}
@@ -132,8 +132,7 @@ string S3FileSystem::UrlEncode(const string &input, bool encode_slash) {
132132
}
133133

134134
static bool IsGCSRequest(const string &url) {
135-
return StringUtil::StartsWith(url, "gcs://") ||
136-
StringUtil::StartsWith(url, "gs://");
135+
return StringUtil::StartsWith(url, "gcs://") || StringUtil::StartsWith(url, "gs://");
137136
}
138137

139138
void AWSEnvironmentCredentialsProvider::SetExtensionOptionValue(string key, const char *env_var_name) {
@@ -173,7 +172,7 @@ S3AuthParams AWSEnvironmentCredentialsProvider::CreateParams() {
173172
params.endpoint = DUCKDB_ENDPOINT_ENV_VAR;
174173
params.kms_key_id = DUCKDB_KMS_KEY_ID_ENV_VAR;
175174
params.use_ssl = DUCKDB_USE_SSL_ENV_VAR;
176-
params.requester_pays = DUCKDB_REQUESTER_PAYS_ENV_VAR;
175+
params.requester_pays = DUCKDB_REQUESTER_PAYS_ENV_VAR;
177176

178177
return params;
179178
}
@@ -199,8 +198,7 @@ S3AuthParams S3AuthParams::ReadFrom(optional_ptr<FileOpener> opener, FileOpenerI
199198
secret_reader.TryGetSecretKeyOrSetting("kms_key_id", "s3_kms_key_id", result.kms_key_id);
200199
secret_reader.TryGetSecretKeyOrSetting("s3_url_compatibility_mode", "s3_url_compatibility_mode",
201200
result.s3_url_compatibility_mode);
202-
secret_reader.TryGetSecretKeyOrSetting("requester_pays", "s3_requester_pays",
203-
result.requester_pays);
201+
secret_reader.TryGetSecretKeyOrSetting("requester_pays", "s3_requester_pays", result.requester_pays);
204202

205203
// Endpoint and url style are slightly more complex and require special handling for gcs and r2
206204
auto endpoint_result = secret_reader.TryGetSecretKeyOrSetting("endpoint", "s3_endpoint", result.endpoint);
@@ -219,9 +217,9 @@ S3AuthParams S3AuthParams::ReadFrom(optional_ptr<FileOpener> opener, FileOpenerI
219217
}
220218

221219
if (!result.region.empty() && (result.endpoint.empty() || result.endpoint == "s3.amazonaws.com")) {
222-
result.endpoint = StringUtil::Format("s3.%s.amazonaws.com", result.region);
220+
result.endpoint = StringUtil::Format("s3.%s.amazonaws.com", result.region);
223221
} else if (result.endpoint.empty()) {
224-
result.endpoint = "s3.amazonaws.com";
222+
result.endpoint = "s3.amazonaws.com";
225223
}
226224

227225
return result;
@@ -572,7 +570,7 @@ void S3FileSystem::ReadQueryParams(const string &url_query_param, S3AuthParams &
572570
if (!query_params.empty()) {
573571
throw IOException("Invalid query parameters found. Supported parameters are:\n's3_region', 's3_access_key_id', "
574572
"'s3_secret_access_key', 's3_session_token',\n's3_endpoint', 's3_url_style', 's3_use_ssl', "
575-
"'s3_requester_pays'");
573+
"'s3_requester_pays'");
576574
}
577575
}
578576

@@ -683,7 +681,7 @@ unique_ptr<HTTPResponse> S3FileSystem::PostRequest(FileHandle &handle, string ur
683681
auto auth_params = handle.Cast<S3FileHandle>().auth_params;
684682
auto parsed_s3_url = S3UrlParse(url, auth_params);
685683
string http_url = parsed_s3_url.GetHTTPUrl(auth_params, http_params);
686-
684+
687685
HTTPHeaders headers;
688686
if (IsGCSRequest(url) && !auth_params.oauth2_bearer_token.empty()) {
689687
// Use bearer token for GCS
@@ -694,7 +692,7 @@ unique_ptr<HTTPResponse> S3FileSystem::PostRequest(FileHandle &handle, string ur
694692
// Use existing S3 authentication
695693
auto payload_hash = GetPayloadHash(buffer_in, buffer_in_len);
696694
headers = create_s3_header(parsed_s3_url.path, http_params, parsed_s3_url.host, "s3", "POST", auth_params, "",
697-
"", payload_hash, "application/octet-stream");
695+
"", payload_hash, "application/octet-stream");
698696
}
699697

700698
return HTTPFileSystem::PostRequest(handle, http_url, headers, result, buffer_in, buffer_in_len);
@@ -706,7 +704,7 @@ unique_ptr<HTTPResponse> S3FileSystem::PutRequest(FileHandle &handle, string url
706704
auto parsed_s3_url = S3UrlParse(url, auth_params);
707705
string http_url = parsed_s3_url.GetHTTPUrl(auth_params, http_params);
708706
auto content_type = "application/octet-stream";
709-
707+
710708
HTTPHeaders headers;
711709
if (IsGCSRequest(url) && !auth_params.oauth2_bearer_token.empty()) {
712710
// Use bearer token for GCS
@@ -717,47 +715,47 @@ unique_ptr<HTTPResponse> S3FileSystem::PutRequest(FileHandle &handle, string url
717715
// Use existing S3 authentication
718716
auto payload_hash = GetPayloadHash(buffer_in, buffer_in_len);
719717
headers = create_s3_header(parsed_s3_url.path, http_params, parsed_s3_url.host, "s3", "PUT", auth_params, "",
720-
"", payload_hash, content_type);
718+
"", payload_hash, content_type);
721719
}
722-
720+
723721
return HTTPFileSystem::PutRequest(handle, http_url, headers, buffer_in, buffer_in_len);
724722
}
725723

726724
unique_ptr<HTTPResponse> S3FileSystem::HeadRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
727725
auto auth_params = handle.Cast<S3FileHandle>().auth_params;
728726
auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
729727
string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
730-
728+
731729
HTTPHeaders headers;
732730
if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
733731
// Use bearer token for GCS
734732
headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
735733
headers["Host"] = parsed_s3_url.host;
736734
} else {
737735
// Use existing S3 authentication
738-
headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
739-
"s3", "HEAD", auth_params, "", "", "", "");
736+
headers =
737+
create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "HEAD", auth_params, "", "", "", "");
740738
}
741-
739+
742740
return HTTPFileSystem::HeadRequest(handle, http_url, headers);
743741
}
744742

745743
unique_ptr<HTTPResponse> S3FileSystem::GetRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
746744
auto auth_params = handle.Cast<S3FileHandle>().auth_params;
747745
auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
748746
string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
749-
747+
750748
HTTPHeaders headers;
751749
if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
752750
// Use bearer token for GCS
753751
headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
754752
headers["Host"] = parsed_s3_url.host;
755753
} else {
756754
// Use existing S3 authentication
757-
headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
758-
"s3", "GET", auth_params, "", "", "", "");
755+
headers =
756+
create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "GET", auth_params, "", "", "", "");
759757
}
760-
758+
761759
return HTTPFileSystem::GetRequest(handle, http_url, headers);
762760
}
763761

@@ -766,37 +764,37 @@ unique_ptr<HTTPResponse> S3FileSystem::GetRangeRequest(FileHandle &handle, strin
766764
auto auth_params = handle.Cast<S3FileHandle>().auth_params;
767765
auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
768766
string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
769-
767+
770768
HTTPHeaders headers;
771769
if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
772770
// Use bearer token for GCS
773771
headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
774772
headers["Host"] = parsed_s3_url.host;
775773
} else {
776774
// Use existing S3 authentication
777-
headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
778-
"s3", "GET", auth_params, "", "", "", "");
775+
headers =
776+
create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "GET", auth_params, "", "", "", "");
779777
}
780-
778+
781779
return HTTPFileSystem::GetRangeRequest(handle, http_url, headers, file_offset, buffer_out, buffer_out_len);
782780
}
783781

784782
unique_ptr<HTTPResponse> S3FileSystem::DeleteRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
785783
auto auth_params = handle.Cast<S3FileHandle>().auth_params;
786784
auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
787785
string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
788-
786+
789787
HTTPHeaders headers;
790788
if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
791789
// Use bearer token for GCS
792790
headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
793791
headers["Host"] = parsed_s3_url.host;
794792
} else {
795793
// Use existing S3 authentication
796-
headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
797-
"s3", "DELETE", auth_params, "", "", "", "");
794+
headers =
795+
create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "DELETE", auth_params, "", "", "", "");
798796
}
799-
797+
800798
return HTTPFileSystem::DeleteRequest(handle, http_url, headers);
801799
}
802800

@@ -1114,8 +1112,7 @@ string S3FileSystem::GetS3AuthError(S3AuthParams &s3_auth_params) {
11141112

11151113
string S3FileSystem::GetGCSAuthError(S3AuthParams &s3_auth_params) {
11161114
string extra_text = "\n\nAuthentication Failure - GCS authentication failed.";
1117-
if (s3_auth_params.oauth2_bearer_token.empty() &&
1118-
s3_auth_params.secret_access_key.empty() &&
1115+
if (s3_auth_params.oauth2_bearer_token.empty() && s3_auth_params.secret_access_key.empty() &&
11191116
s3_auth_params.access_key_id.empty()) {
11201117
extra_text += "\n* No credentials provided.";
11211118
extra_text += "\n* For OAuth2: CREATE SECRET (TYPE GCS, bearer_token 'your-token')";
@@ -1145,15 +1142,15 @@ HTTPException S3FileSystem::GetS3Error(S3AuthParams &s3_auth_params, const HTTPR
11451142

11461143
HTTPException S3FileSystem::GetHTTPError(FileHandle &handle, const HTTPResponse &response, const string &url) {
11471144
auto &s3_handle = handle.Cast<S3FileHandle>();
1148-
1145+
11491146
// Use GCS-specific error for GCS URLs
11501147
if (IsGCSRequest(url) && response.status == HTTPStatusCode::Forbidden_403) {
11511148
string extra_text = GetGCSAuthError(s3_handle.auth_params);
11521149
auto status_message = HTTPFSUtil::GetStatusMessage(response.status);
1153-
throw HTTPException(response, "HTTP error on '%s' (HTTP %d %s)%s", url,
1154-
response.status, status_message, extra_text);
1150+
throw HTTPException(response, "HTTP error on '%s' (HTTP %d %s)%s", url, response.status, status_message,
1151+
extra_text);
11551152
}
1156-
1153+
11571154
return GetS3Error(s3_handle.auth_params, response, url);
11581155
}
11591156
string AWSListObjectV2::Request(string &path, HTTPParams &http_params, S3AuthParams &s3_auth_params,

0 commit comments

Comments (0)