@@ -70,7 +70,7 @@ static HTTPHeaders create_s3_header(string url, string query, string host, strin
 		res["x-amz-request-payer"] = "requester";
 	}
 
-	string signed_headers = "";
+	string signed_headers = "";
 	hash_bytes canonical_request_hash;
 	hash_str canonical_request_hash_str;
 	if (content_type.length() > 0) {
@@ -87,7 +87,7 @@ static HTTPHeaders create_s3_header(string url, string query, string host, strin
 	if (use_requester_pays) {
 		signed_headers += ";x-amz-request-payer";
 	}
-	auto canonical_request = method + "\n" + S3FileSystem::UrlEncode(url) + "\n" + query;
+	auto canonical_request = method + "\n" + S3FileSystem::UrlEncode(url) + "\n" + query;
 	if (content_type.length() > 0) {
 		canonical_request += "\ncontent-type:" + content_type;
 	}
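The two hunks above sit in the SigV4 signing path: the optional x-amz-request-payer header is added to both the header map and the signed-headers list before the canonical request is hashed. For readers unfamiliar with the format, here is a minimal standalone sketch of how a SigV4-style canonical request is assembled. It follows the layout in the AWS documentation using plain std::string and std::map; BuildCanonicalRequest and its inputs are illustrative names, not the extension's HTTPHeaders or hashing helpers.

// Sketch only: assembles a SigV4-style canonical request from illustrative inputs.
// The real code also normalizes headers and appends the payload hash via helpers
// not reproduced here.
#include <iostream>
#include <map>
#include <string>

static std::string BuildCanonicalRequest(const std::string &method, const std::string &encoded_url,
                                         const std::string &query, const std::map<std::string, std::string> &headers,
                                         const std::string &signed_headers, const std::string &payload_hash) {
	std::string canonical = method + "\n" + encoded_url + "\n" + query + "\n";
	for (auto &kv : headers) { // std::map keeps keys sorted, as SigV4 requires
		canonical += kv.first + ":" + kv.second + "\n";
	}
	canonical += "\n" + signed_headers + "\n" + payload_hash;
	return canonical;
}

int main() {
	std::map<std::string, std::string> headers = {{"host", "my-bucket.s3.amazonaws.com"},
	                                              {"x-amz-request-payer", "requester"}};
	std::string signed_headers = "host;x-amz-request-payer";
	std::cout << BuildCanonicalRequest("GET", "/key.parquet", "", headers, signed_headers, "UNSIGNED-PAYLOAD") << "\n";
}

create_s3_header goes on to hash the canonical request (the canonical_request_hash variables above) and derive the Authorization header from it, which the sketch omits.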
@@ -136,8 +136,7 @@ string S3FileSystem::UrlEncode(const string &input, bool encode_slash) {
 }
 
 static bool IsGCSRequest(const string &url) {
-	return StringUtil::StartsWith(url, "gcs://") ||
-	       StringUtil::StartsWith(url, "gs://");
+	return StringUtil::StartsWith(url, "gcs://") || StringUtil::StartsWith(url, "gs://");
 }
 
 void AWSEnvironmentCredentialsProvider::SetExtensionOptionValue(string key, const char *env_var_name) {
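IsGCSRequest above treats both the gcs:// and gs:// schemes as Google Cloud Storage, which is what later lets the request methods pick bearer-token authentication for those URLs. A self-contained version of the same prefix check, with a hand-rolled StartsWith standing in for DuckDB's StringUtil, might look like:

// Sketch: scheme-prefix dispatch for GCS URLs (gcs:// and gs:// are treated identically).
#include <iostream>
#include <string>

static bool StartsWith(const std::string &str, const std::string &prefix) {
	return str.compare(0, prefix.size(), prefix) == 0;
}

static bool IsGCSRequest(const std::string &url) {
	return StartsWith(url, "gcs://") || StartsWith(url, "gs://");
}

int main() {
	std::cout << IsGCSRequest("gs://bucket/file.parquet") << "\n"; // 1
	std::cout << IsGCSRequest("s3://bucket/file.parquet") << "\n"; // 0
}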
@@ -177,7 +176,7 @@ S3AuthParams AWSEnvironmentCredentialsProvider::CreateParams() {
 	params.endpoint = DUCKDB_ENDPOINT_ENV_VAR;
 	params.kms_key_id = DUCKDB_KMS_KEY_ID_ENV_VAR;
 	params.use_ssl = DUCKDB_USE_SSL_ENV_VAR;
-	params.requester_pays = DUCKDB_REQUESTER_PAYS_ENV_VAR;
+	params.requester_pays = DUCKDB_REQUESTER_PAYS_ENV_VAR;
 
 	return params;
 }
@@ -203,8 +202,7 @@ S3AuthParams S3AuthParams::ReadFrom(optional_ptr<FileOpener> opener, FileOpenerI
 	secret_reader.TryGetSecretKeyOrSetting("kms_key_id", "s3_kms_key_id", result.kms_key_id);
 	secret_reader.TryGetSecretKeyOrSetting("s3_url_compatibility_mode", "s3_url_compatibility_mode",
 	                                       result.s3_url_compatibility_mode);
-	secret_reader.TryGetSecretKeyOrSetting("requester_pays", "s3_requester_pays",
-	                                       result.requester_pays);
+	secret_reader.TryGetSecretKeyOrSetting("requester_pays", "s3_requester_pays", result.requester_pays);
 
 	// Endpoint and url style are slightly more complex and require special handling for gcs and r2
 	auto endpoint_result = secret_reader.TryGetSecretKeyOrSetting("endpoint", "s3_endpoint", result.endpoint);
@@ -223,9 +221,9 @@ S3AuthParams S3AuthParams::ReadFrom(optional_ptr<FileOpener> opener, FileOpenerI
 	}
 
 	if (!result.region.empty() && (result.endpoint.empty() || result.endpoint == "s3.amazonaws.com")) {
-		result.endpoint = StringUtil::Format("s3.%s.amazonaws.com", result.region);
+		result.endpoint = StringUtil::Format("s3.%s.amazonaws.com", result.region);
 	} else if (result.endpoint.empty()) {
-		result.endpoint = "s3.amazonaws.com";
+		result.endpoint = "s3.amazonaws.com";
 	}
 
 	return result;
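The branch above defaults the endpoint: when a region is configured and the endpoint is empty or still the global s3.amazonaws.com, it switches to the regional s3.<region>.amazonaws.com form; with no region and no explicit endpoint it falls back to the global default, and any custom endpoint is left alone. A standalone sketch of that decision, with plain string concatenation in place of StringUtil::Format and ResolveEndpoint as an illustrative name, is shown below.

// Sketch: default S3 endpoint selection based on region, mirroring the branch above.
#include <iostream>
#include <string>

static std::string ResolveEndpoint(const std::string &region, std::string endpoint) {
	if (!region.empty() && (endpoint.empty() || endpoint == "s3.amazonaws.com")) {
		// Regional endpoint, e.g. s3.eu-west-1.amazonaws.com
		endpoint = "s3." + region + ".amazonaws.com";
	} else if (endpoint.empty()) {
		// No region and no explicit endpoint: fall back to the global default
		endpoint = "s3.amazonaws.com";
	}
	return endpoint;
}

int main() {
	std::cout << ResolveEndpoint("eu-west-1", "") << "\n";                 // s3.eu-west-1.amazonaws.com
	std::cout << ResolveEndpoint("", "") << "\n";                          // s3.amazonaws.com
	std::cout << ResolveEndpoint("us-east-1", "minio.local:9000") << "\n"; // custom endpoint is preserved
}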
@@ -592,7 +590,7 @@ void S3FileSystem::ReadQueryParams(const string &url_query_param, S3AuthParams &
 	if (!query_params.empty()) {
 		throw IOException("Invalid query parameters found. Supported parameters are:\n's3_region', 's3_access_key_id', "
 		                  "'s3_secret_access_key', 's3_session_token',\n's3_endpoint', 's3_url_style', 's3_use_ssl', "
-		                  "'s3_requester_pays'");
+		                  "'s3_requester_pays'");
 	}
 }
 
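ReadQueryParams above rejects whatever query parameters remain after the supported s3_* keys have been consumed, listing the supported names in the error. A minimal stand-in for that check is sketched here; the std::map input and the CheckQueryParams name are assumptions for illustration, not the parser's real types.

// Sketch: reject unrecognized URL query parameters after the known ones are consumed.
#include <iostream>
#include <map>
#include <set>
#include <stdexcept>
#include <string>

static void CheckQueryParams(std::map<std::string, std::string> query_params) {
	static const std::set<std::string> supported = {"s3_region",        "s3_access_key_id", "s3_secret_access_key",
	                                                "s3_session_token", "s3_endpoint",      "s3_url_style",
	                                                "s3_use_ssl",       "s3_requester_pays"};
	for (auto &name : supported) {
		query_params.erase(name); // consume recognized parameters
	}
	if (!query_params.empty()) {
		throw std::runtime_error("Invalid query parameters found. Supported parameters are: "
		                         "'s3_region', 's3_access_key_id', 's3_secret_access_key', 's3_session_token', "
		                         "'s3_endpoint', 's3_url_style', 's3_use_ssl', 's3_requester_pays'");
	}
}

int main() {
	try {
		CheckQueryParams({{"s3_region", "eu-west-1"}, {"unknown_param", "x"}});
	} catch (const std::exception &ex) {
		std::cout << ex.what() << "\n";
	}
}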
@@ -703,7 +701,7 @@ unique_ptr<HTTPResponse> S3FileSystem::PostRequest(FileHandle &handle, string ur
 	auto auth_params = handle.Cast<S3FileHandle>().auth_params;
 	auto parsed_s3_url = S3UrlParse(url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params, http_params);
-
+
 	HTTPHeaders headers;
 	if (IsGCSRequest(url) && !auth_params.oauth2_bearer_token.empty()) {
 		// Use bearer token for GCS
@@ -714,7 +712,7 @@ unique_ptr<HTTPResponse> S3FileSystem::PostRequest(FileHandle &handle, string ur
 		// Use existing S3 authentication
 		auto payload_hash = GetPayloadHash(buffer_in, buffer_in_len);
 		headers = create_s3_header(parsed_s3_url.path, http_params, parsed_s3_url.host, "s3", "POST", auth_params, "",
-		                           "", payload_hash, "application/octet-stream");
+		                           "", payload_hash, "application/octet-stream");
 	}
 
 	return HTTPFileSystem::PostRequest(handle, http_url, headers, result, buffer_in, buffer_in_len);
@@ -726,7 +724,7 @@ unique_ptr<HTTPResponse> S3FileSystem::PutRequest(FileHandle &handle, string url
 	auto parsed_s3_url = S3UrlParse(url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params, http_params);
 	auto content_type = "application/octet-stream";
-
+
 	HTTPHeaders headers;
 	if (IsGCSRequest(url) && !auth_params.oauth2_bearer_token.empty()) {
 		// Use bearer token for GCS
@@ -737,47 +735,47 @@ unique_ptr<HTTPResponse> S3FileSystem::PutRequest(FileHandle &handle, string url
 		// Use existing S3 authentication
 		auto payload_hash = GetPayloadHash(buffer_in, buffer_in_len);
 		headers = create_s3_header(parsed_s3_url.path, http_params, parsed_s3_url.host, "s3", "PUT", auth_params, "",
-		                           "", payload_hash, content_type);
+		                           "", payload_hash, content_type);
 	}
-
+
 	return HTTPFileSystem::PutRequest(handle, http_url, headers, buffer_in, buffer_in_len);
 }
 
 unique_ptr<HTTPResponse> S3FileSystem::HeadRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
 	auto auth_params = handle.Cast<S3FileHandle>().auth_params;
 	auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-
+
 	HTTPHeaders headers;
 	if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
 		// Use bearer token for GCS
 		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
 		headers["Host"] = parsed_s3_url.host;
 	} else {
 		// Use existing S3 authentication
-		headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
-		                           "s3", "HEAD", auth_params, "", "", "", "");
+		headers =
+		    create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "HEAD", auth_params, "", "", "", "");
 	}
-
+
 	return HTTPFileSystem::HeadRequest(handle, http_url, headers);
 }
 
 unique_ptr<HTTPResponse> S3FileSystem::GetRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
 	auto auth_params = handle.Cast<S3FileHandle>().auth_params;
 	auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-
+
 	HTTPHeaders headers;
 	if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
 		// Use bearer token for GCS
 		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
 		headers["Host"] = parsed_s3_url.host;
 	} else {
 		// Use existing S3 authentication
-		headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
-		                           "s3", "GET", auth_params, "", "", "", "");
+		headers =
+		    create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "GET", auth_params, "", "", "", "");
 	}
-
+
 	return HTTPFileSystem::GetRequest(handle, http_url, headers);
 }
 
@@ -786,37 +784,37 @@ unique_ptr<HTTPResponse> S3FileSystem::GetRangeRequest(FileHandle &handle, strin
 	auto auth_params = handle.Cast<S3FileHandle>().auth_params;
 	auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-
+
 	HTTPHeaders headers;
 	if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
 		// Use bearer token for GCS
 		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
 		headers["Host"] = parsed_s3_url.host;
 	} else {
 		// Use existing S3 authentication
-		headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
-		                           "s3", "GET", auth_params, "", "", "", "");
+		headers =
+		    create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "GET", auth_params, "", "", "", "");
 	}
-
+
 	return HTTPFileSystem::GetRangeRequest(handle, http_url, headers, file_offset, buffer_out, buffer_out_len);
 }
 
 unique_ptr<HTTPResponse> S3FileSystem::DeleteRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
 	auto auth_params = handle.Cast<S3FileHandle>().auth_params;
 	auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-
+
 	HTTPHeaders headers;
 	if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
 		// Use bearer token for GCS
 		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
 		headers["Host"] = parsed_s3_url.host;
 	} else {
 		// Use existing S3 authentication
-		headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
-		                           "s3", "DELETE", auth_params, "", "", "", "");
+		headers =
+		    create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "DELETE", auth_params, "", "", "", "");
 	}
-
+
 	return HTTPFileSystem::DeleteRequest(handle, http_url, headers);
 }
 
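Every request method touched above (POST, PUT, HEAD, GET, ranged GET, DELETE) follows the same shape: if the URL is a GCS URL and an OAuth2 bearer token is configured, send Authorization: Bearer plus Host; otherwise build SigV4-style signed headers through create_s3_header. The sketch below isolates that dispatch with a std::map standing in for HTTPHeaders; Headers, BuildHeaders and the SignS3Request stub are illustrative names, and the stub only hints at where real signing would happen.

// Sketch of the per-request header dispatch: GCS bearer token when available,
// otherwise fall back to signed S3-style headers. Types and the signing stub are
// illustrative, not the extension's real API.
#include <iostream>
#include <map>
#include <string>

using Headers = std::map<std::string, std::string>;

static bool IsGCSUrl(const std::string &url) {
	return url.rfind("gcs://", 0) == 0 || url.rfind("gs://", 0) == 0;
}

static Headers SignS3Request(const std::string &host, const std::string &method) {
	// Placeholder for create_s3_header(): a real implementation would compute a
	// SigV4 Authorization header from the canonical request.
	return {{"Host", host}, {"Authorization", "AWS4-HMAC-SHA256 ...(" + method + " signature)..."}};
}

static Headers BuildHeaders(const std::string &url, const std::string &host, const std::string &method,
                            const std::string &bearer_token) {
	Headers headers;
	if (IsGCSUrl(url) && !bearer_token.empty()) {
		// GCS path: plain OAuth2 bearer token, no request signing
		headers["Authorization"] = "Bearer " + bearer_token;
		headers["Host"] = host;
	} else {
		// S3 path (also GCS via HMAC keys): signed headers
		headers = SignS3Request(host, method);
	}
	return headers;
}

int main() {
	for (auto &kv : BuildHeaders("gs://bucket/obj", "storage.googleapis.com", "GET", "ya29.token")) {
		std::cout << kv.first << ": " << kv.second << "\n";
	}
}

Keeping the dispatch identical across methods is what lets GCS URLs reuse the whole S3 request machinery with only the header block swapped out.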
@@ -1134,8 +1132,7 @@ string S3FileSystem::GetS3AuthError(S3AuthParams &s3_auth_params) {
 
 string S3FileSystem::GetGCSAuthError(S3AuthParams &s3_auth_params) {
 	string extra_text = "\n\nAuthentication Failure - GCS authentication failed.";
-	if (s3_auth_params.oauth2_bearer_token.empty() &&
-	    s3_auth_params.secret_access_key.empty() &&
+	if (s3_auth_params.oauth2_bearer_token.empty() && s3_auth_params.secret_access_key.empty() &&
 	    s3_auth_params.access_key_id.empty()) {
 		extra_text += "\n* No credentials provided.";
 		extra_text += "\n* For OAuth2: CREATE SECRET (TYPE GCS, bearer_token 'your-token')";
@@ -1165,15 +1162,15 @@ HTTPException S3FileSystem::GetS3Error(S3AuthParams &s3_auth_params, const HTTPR
 
 HTTPException S3FileSystem::GetHTTPError(FileHandle &handle, const HTTPResponse &response, const string &url) {
 	auto &s3_handle = handle.Cast<S3FileHandle>();
-
+
 	// Use GCS-specific error for GCS URLs
 	if (IsGCSRequest(url) && response.status == HTTPStatusCode::Forbidden_403) {
 		string extra_text = GetGCSAuthError(s3_handle.auth_params);
 		auto status_message = HTTPFSUtil::GetStatusMessage(response.status);
-		throw HTTPException(response, "HTTP error on '%s' (HTTP %d %s)%s", url,
-		                    response.status, status_message, extra_text);
+		throw HTTPException(response, "HTTP error on '%s' (HTTP %d %s)%s", url, response.status, status_message,
+		                    extra_text);
 	}
-
+
 	return GetS3Error(s3_handle.auth_params, response, url);
 }
 string AWSListObjectV2::Request(string &path, HTTPParams &http_params, S3AuthParams &s3_auth_params,
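GetHTTPError above special-cases a 403 on GCS URLs so the user gets the GCS credential hints from GetGCSAuthError instead of the generic S3 message. A compact standalone version of that branching, with std::runtime_error and hard-coded text standing in for HTTPException and the real hint strings, could look like:

// Sketch: choose a GCS-specific authentication hint for 403s on gs://-style URLs,
// otherwise defer to the generic error text. Exception type and messages are illustrative.
#include <iostream>
#include <stdexcept>
#include <string>

static bool IsGCSUrl(const std::string &url) {
	return url.rfind("gcs://", 0) == 0 || url.rfind("gs://", 0) == 0;
}

static std::runtime_error MakeHttpError(const std::string &url, int status) {
	if (IsGCSUrl(url) && status == 403) {
		return std::runtime_error("HTTP error on '" + url + "' (HTTP 403 Forbidden)\n\n"
		                          "Authentication Failure - GCS authentication failed.");
	}
	return std::runtime_error("HTTP error on '" + url + "' (HTTP " + std::to_string(status) + ")");
}

int main() {
	try {
		throw MakeHttpError("gs://bucket/missing.parquet", 403);
	} catch (const std::exception &ex) {
		std::cout << ex.what() << "\n";
	}
}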