@@ -131,6 +131,11 @@ string S3FileSystem::UrlEncode(const string &input, bool encode_slash) {
 	return StringUtil::URLEncode(input, encode_slash);
 }
 
+static bool IsGCSRequest(const string &url) {
+	return StringUtil::StartsWith(url, "gcs://") ||
+	       StringUtil::StartsWith(url, "gs://");
+}
+
 void AWSEnvironmentCredentialsProvider::SetExtensionOptionValue(string key, const char *env_var_name) {
 	char *evar;
 
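For context on the new helper: `IsGCSRequest` only inspects the URL scheme, so `gcs://` and `gs://` paths take the bearer-token branch added below, while `s3://` (and any other scheme) keeps the existing signed-header path. A minimal standalone sketch of the same prefix check, assuming nothing beyond the C++ standard library (the real code uses DuckDB's `StringUtil::StartsWith`):

```cpp
// Standalone sketch of the scheme check above; illustrative only.
#include <cassert>
#include <string>

static bool IsGCSRequestSketch(const std::string &url) {
	// rfind(prefix, 0) == 0 is a prefix test, mirroring StringUtil::StartsWith.
	return url.rfind("gcs://", 0) == 0 || url.rfind("gs://", 0) == 0;
}

int main() {
	assert(IsGCSRequestSketch("gcs://bucket/data.parquet"));
	assert(IsGCSRequestSketch("gs://bucket/data.parquet"));
	assert(!IsGCSRequestSketch("s3://bucket/data.parquet")); // stays on the SigV4 signing path
	return 0;
}
```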
@@ -209,6 +214,8 @@ S3AuthParams S3AuthParams::ReadFrom(optional_ptr<FileOpener> opener, FileOpenerI
 		if (result.url_style.empty() || url_style_result.GetScope() != SettingScope::SECRET) {
 			result.url_style = "path";
 		}
+		// Read bearer token for GCS
+		secret_reader.TryGetSecretKey("bearer_token", result.oauth2_bearer_token);
 	}
 
 	if (!result.region.empty() && (result.endpoint.empty() || result.endpoint == "s3.amazonaws.com")) {
@@ -235,9 +242,13 @@ unique_ptr<KeyValueSecret> CreateSecret(vector<string> &prefix_paths_p, string &
 	return_value->secret_map["kms_key_id"] = params.kms_key_id;
 	return_value->secret_map["s3_url_compatibility_mode"] = params.s3_url_compatibility_mode;
 	return_value->secret_map["requester_pays"] = params.requester_pays;
+	return_value->secret_map["bearer_token"] = params.oauth2_bearer_token;
 
 	//! Set redact keys
 	return_value->redact_keys = {"secret", "session_token"};
+	if (!params.oauth2_bearer_token.empty()) {
+		return_value->redact_keys.insert("bearer_token");
+	}
 
 	return return_value;
 }
@@ -672,9 +683,19 @@ unique_ptr<HTTPResponse> S3FileSystem::PostRequest(FileHandle &handle, string ur
 	auto auth_params = handle.Cast<S3FileHandle>().auth_params;
 	auto parsed_s3_url = S3UrlParse(url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params, http_params);
-	auto payload_hash = GetPayloadHash(buffer_in, buffer_in_len);
-	auto headers = create_s3_header(parsed_s3_url.path, http_params, parsed_s3_url.host, "s3", "POST", auth_params, "",
-	                                "", payload_hash, "application/octet-stream");
+
+	HTTPHeaders headers;
+	if (IsGCSRequest(url) && !auth_params.oauth2_bearer_token.empty()) {
+		// Use bearer token for GCS
+		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
+		headers["Host"] = parsed_s3_url.host;
+		headers["Content-Type"] = "application/octet-stream";
+	} else {
+		// Use existing S3 authentication
+		auto payload_hash = GetPayloadHash(buffer_in, buffer_in_len);
+		headers = create_s3_header(parsed_s3_url.path, http_params, parsed_s3_url.host, "s3", "POST", auth_params, "",
+		                           "", payload_hash, "application/octet-stream");
+	}
 
 	return HTTPFileSystem::PostRequest(handle, http_url, headers, result, buffer_in, buffer_in_len);
 }
@@ -685,28 +706,58 @@ unique_ptr<HTTPResponse> S3FileSystem::PutRequest(FileHandle &handle, string url
 	auto parsed_s3_url = S3UrlParse(url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params, http_params);
 	auto content_type = "application/octet-stream";
-	auto payload_hash = GetPayloadHash(buffer_in, buffer_in_len);
-
-	auto headers = create_s3_header(parsed_s3_url.path, http_params, parsed_s3_url.host, "s3", "PUT", auth_params, "",
-	                                "", payload_hash, content_type);
+
+	HTTPHeaders headers;
+	if (IsGCSRequest(url) && !auth_params.oauth2_bearer_token.empty()) {
+		// Use bearer token for GCS
+		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
+		headers["Host"] = parsed_s3_url.host;
+		headers["Content-Type"] = content_type;
+	} else {
+		// Use existing S3 authentication
+		auto payload_hash = GetPayloadHash(buffer_in, buffer_in_len);
+		headers = create_s3_header(parsed_s3_url.path, http_params, parsed_s3_url.host, "s3", "PUT", auth_params, "",
+		                           "", payload_hash, content_type);
+	}
+
 	return HTTPFileSystem::PutRequest(handle, http_url, headers, buffer_in, buffer_in_len);
 }
 
 unique_ptr<HTTPResponse> S3FileSystem::HeadRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
 	auto auth_params = handle.Cast<S3FileHandle>().auth_params;
 	auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-	auto headers =
-	    create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "HEAD", auth_params, "", "", "", "");
+
+	HTTPHeaders headers;
+	if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
+		// Use bearer token for GCS
+		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
+		headers["Host"] = parsed_s3_url.host;
+	} else {
+		// Use existing S3 authentication
+		headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
+		                           "s3", "HEAD", auth_params, "", "", "", "");
+	}
+
 	return HTTPFileSystem::HeadRequest(handle, http_url, headers);
 }
 
 unique_ptr<HTTPResponse> S3FileSystem::GetRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
 	auto auth_params = handle.Cast<S3FileHandle>().auth_params;
 	auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-	auto headers =
-	    create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "GET", auth_params, "", "", "", "");
+
+	HTTPHeaders headers;
+	if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
+		// Use bearer token for GCS
+		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
+		headers["Host"] = parsed_s3_url.host;
+	} else {
+		// Use existing S3 authentication
+		headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
+		                           "s3", "GET", auth_params, "", "", "", "");
+	}
+
 	return HTTPFileSystem::GetRequest(handle, http_url, headers);
 }
 
@@ -715,17 +766,37 @@ unique_ptr<HTTPResponse> S3FileSystem::GetRangeRequest(FileHandle &handle, strin
 	auto auth_params = handle.Cast<S3FileHandle>().auth_params;
 	auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-	auto headers =
-	    create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "GET", auth_params, "", "", "", "");
+
+	HTTPHeaders headers;
+	if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
+		// Use bearer token for GCS
+		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
+		headers["Host"] = parsed_s3_url.host;
+	} else {
+		// Use existing S3 authentication
+		headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
+		                           "s3", "GET", auth_params, "", "", "", "");
+	}
+
 	return HTTPFileSystem::GetRangeRequest(handle, http_url, headers, file_offset, buffer_out, buffer_out_len);
 }
 
 unique_ptr<HTTPResponse> S3FileSystem::DeleteRequest(FileHandle &handle, string s3_url, HTTPHeaders header_map) {
 	auto auth_params = handle.Cast<S3FileHandle>().auth_params;
 	auto parsed_s3_url = S3UrlParse(s3_url, auth_params);
 	string http_url = parsed_s3_url.GetHTTPUrl(auth_params);
-	auto headers =
-	    create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host, "s3", "DELETE", auth_params, "", "", "", "");
+
+	HTTPHeaders headers;
+	if (IsGCSRequest(s3_url) && !auth_params.oauth2_bearer_token.empty()) {
+		// Use bearer token for GCS
+		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
+		headers["Host"] = parsed_s3_url.host;
+	} else {
+		// Use existing S3 authentication
+		headers = create_s3_header(parsed_s3_url.path, "", parsed_s3_url.host,
+		                           "s3", "DELETE", auth_params, "", "", "", "");
+	}
+
 	return HTTPFileSystem::DeleteRequest(handle, http_url, headers);
 }
 
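The same bearer-token/SigV4 branch now appears in `PostRequest`, `PutRequest`, `HeadRequest`, `GetRequest`, `GetRangeRequest` and `DeleteRequest`. Below is a hedged sketch of how that decision could be centralized; the helper name `CreateRequestHeaders` and its parameter list are hypothetical and not part of this commit, and the sketch assumes the `HTTPHeaders` type and `create_s3_header` call shape shown in the diff plus a parsed-URL struct exposing `host` and `path`:

```cpp
// Hypothetical consolidation sketch (not in the commit): one place to decide
// between GCS bearer-token headers and the existing S3 signed headers.
static HTTPHeaders CreateRequestHeaders(const string &url, const ParsedS3Url &parsed_s3_url,
                                        S3AuthParams &auth_params, const string &http_verb,
                                        const string &query, const string &payload_hash,
                                        const string &content_type) {
	HTTPHeaders headers;
	if (IsGCSRequest(url) && !auth_params.oauth2_bearer_token.empty()) {
		// GCS: a pre-obtained OAuth2 token replaces request signing.
		headers["Authorization"] = "Bearer " + auth_params.oauth2_bearer_token;
		headers["Host"] = parsed_s3_url.host;
		if (!content_type.empty()) {
			headers["Content-Type"] = content_type;
		}
	} else {
		// S3/R2: keep the existing SigV4-style signed headers.
		headers = create_s3_header(parsed_s3_url.path, query, parsed_s3_url.host, "s3", http_verb, auth_params, "",
		                           "", payload_hash, content_type);
	}
	return headers;
}
```

Each request method would then reduce to a single call such as `CreateRequestHeaders(s3_url, parsed_s3_url, auth_params, "GET", "", "", "")`, keeping the token check and the signing path in one place.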
@@ -774,7 +845,12 @@ void S3FileHandle::Initialize(optional_ptr<FileOpener> opener) {
 		}
 		if (entry->second == "403") {
 			// 403: FORBIDDEN
-			auto extra_text = S3FileSystem::GetS3AuthError(auth_params);
+			string extra_text;
+			if (IsGCSRequest(path)) {
+				extra_text = S3FileSystem::GetGCSAuthError(auth_params);
+			} else {
+				extra_text = S3FileSystem::GetS3AuthError(auth_params);
+			}
 			throw Exception(error.Type(), error.RawMessage() + extra_text, extra_info);
 		}
 	}
@@ -1036,6 +1112,24 @@ string S3FileSystem::GetS3AuthError(S3AuthParams &s3_auth_params) {
 	return extra_text;
 }
 
+string S3FileSystem::GetGCSAuthError(S3AuthParams &s3_auth_params) {
+	string extra_text = "\n\nAuthentication Failure - GCS authentication failed.";
+	if (s3_auth_params.oauth2_bearer_token.empty() &&
+	    s3_auth_params.secret_access_key.empty() &&
+	    s3_auth_params.access_key_id.empty()) {
+		extra_text += "\n* No credentials provided.";
+		extra_text += "\n* For OAuth2: CREATE SECRET (TYPE GCS, bearer_token 'your-token')";
+		extra_text += "\n* For HMAC: CREATE SECRET (TYPE GCS, key_id 'key', secret 'secret')";
+	} else if (!s3_auth_params.oauth2_bearer_token.empty()) {
+		extra_text += "\n* Bearer token was provided but authentication failed.";
+		extra_text += "\n* Ensure your OAuth2 token is valid and not expired.";
+	} else {
+		extra_text += "\n* HMAC credentials were provided but authentication failed.";
+		extra_text += "\n* Ensure your HMAC key_id and secret are correct.";
+	}
+	return extra_text;
+}
+
 HTTPException S3FileSystem::GetS3Error(S3AuthParams &s3_auth_params, const HTTPResponse &response, const string &url) {
 	string extra_text;
 	if (response.status == HTTPStatusCode::BadRequest_400) {
@@ -1051,6 +1145,15 @@ HTTPException S3FileSystem::GetS3Error(S3AuthParams &s3_auth_params, const HTTPR
 
 HTTPException S3FileSystem::GetHTTPError(FileHandle &handle, const HTTPResponse &response, const string &url) {
 	auto &s3_handle = handle.Cast<S3FileHandle>();
+
+	// Use GCS-specific error for GCS URLs
+	if (IsGCSRequest(url) && response.status == HTTPStatusCode::Forbidden_403) {
+		string extra_text = GetGCSAuthError(s3_handle.auth_params);
+		auto status_message = HTTPFSUtil::GetStatusMessage(response.status);
+		throw HTTPException(response, "HTTP error on '%s' (HTTP %d %s)%s", url,
+		                    response.status, status_message, extra_text);
+	}
+
 	return GetS3Error(s3_handle.auth_params, response, url);
 }
 string AWSListObjectV2::Request(string &path, HTTPParams &http_params, S3AuthParams &s3_auth_params,
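For completeness, a hedged end-to-end usage sketch against the embedded DuckDB C++ API. The `CREATE SECRET (TYPE GCS, bearer_token ...)` syntax is taken from the error text added above; the token value, bucket, and object names are placeholders, and a build of the httpfs extension containing this change must be loaded for `gs://` URLs to resolve:

```cpp
// Illustrative only: register a GCS secret with an OAuth2 bearer token and read
// a Parquet file over gs://, exercising the bearer-token request path above.
#include "duckdb.hpp"

int main() {
	duckdb::DuckDB db(nullptr);
	duckdb::Connection con(db);

	// Secret syntax as suggested by GetGCSAuthError; 'ya29.placeholder-token' is a placeholder.
	con.Query("CREATE SECRET my_gcs (TYPE GCS, bearer_token 'ya29.placeholder-token')");

	// Reads on gcs:// and gs:// paths now send "Authorization: Bearer <token>".
	auto result = con.Query("SELECT count(*) FROM read_parquet('gs://my-bucket/data.parquet')");
	result->Print();
	return 0;
}
```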