diff --git a/src/endpoint/s3/s3_utils.js b/src/endpoint/s3/s3_utils.js
index ffd2c0c276..72b9a1cdda 100644
--- a/src/endpoint/s3/s3_utils.js
+++ b/src/endpoint/s3/s3_utils.js
@@ -667,15 +667,9 @@ function parse_body_logging_xml(req) {
 }
 
 function get_http_response_date(res) {
-    const r = get_http_response_from_resp(res);
-    if (!r.httpResponse.headers.date) throw new Error("date not found in response header");
-    return r.httpResponse.headers.date;
-}
-
-function get_http_response_from_resp(res) {
-    const r = res.$response;
-    if (!r) throw new Error("no $response in s3 returned object");
-    return r;
+    if (res.$metadata.httpStatusCode !== 200) throw new Error("Response returned with an error");
+    if (!res.LastModified) throw new Error("Date not found in response header");
+    return res.LastModified;
 }
 
 function get_response_field_encoder(req) {
@@ -861,7 +855,6 @@ exports.parse_lock_header = parse_lock_header;
 exports.parse_body_object_lock_conf_xml = parse_body_object_lock_conf_xml;
 exports.parse_to_camel_case = parse_to_camel_case;
 exports._is_valid_retention = _is_valid_retention;
-exports.get_http_response_from_resp = get_http_response_from_resp;
 exports.get_http_response_date = get_http_response_date;
 exports.XATTR_SORT_SYMBOL = XATTR_SORT_SYMBOL;
 exports.get_response_field_encoder = get_response_field_encoder;
diff --git a/src/sdk/namespace_s3.js b/src/sdk/namespace_s3.js
index 834e2c966b..4195075fe7 100644
--- a/src/sdk/namespace_s3.js
+++ b/src/sdk/namespace_s3.js
@@ -2,7 +2,6 @@
 'use strict';
 
 const _ = require('lodash');
-const AWS = require('aws-sdk');
 const util = require('util');
 
 const dbg = require('../util/debug_module')(__filename);
@@ -12,6 +11,7 @@ const cloud_utils = require('../util/cloud_utils');
 const stream_utils = require('../util/stream_utils');
 const blob_translator = require('./blob_translator');
 const S3Error = require('../endpoint/s3/s3_errors').S3Error;
+const noobaa_s3_client = require('../sdk/noobaa_s3_client/noobaa_s3_client');
 /**
  * @implements {nb.Namespace}
  */
@@ -21,20 +21,24 @@ class NamespaceS3 {
     /**
      * @param {{
      *      namespace_resource_id: any,
-     *      s3_params: AWS.S3.ClientConfiguration & {
+     *      s3_params: import("@aws-sdk/client-s3").S3ClientConfig & {
      *          access_mode?: string,
      *          aws_sts_arn?: string,
      *      },
+     *      bucket?: string,
      *      stats: import('./endpoint_stats_collector').EndpointStatsCollector,
      * }} args
      */
-    constructor({ namespace_resource_id, s3_params, stats }) {
+    constructor({ namespace_resource_id, s3_params, bucket, stats }) {
         this.namespace_resource_id = namespace_resource_id;
         this.s3_params = s3_params;
         this.access_key = s3_params.accessKeyId;
         this.endpoint = s3_params.endpoint;
-        this.s3 = new AWS.S3(s3_params);
-        this.bucket = String(this.s3.config.params.Bucket);
+        this.s3 = noobaa_s3_client.get_s3_client_v3_params(s3_params);
+        if (!bucket) {
+            throw new Error('NamespaceS3: bucket is required');
+        }
+        this.bucket = String(bucket);
         this.access_mode = s3_params.access_mode;
         this.stats = stats;
     }
@@ -85,7 +89,7 @@ class NamespaceS3 {
             Delimiter: params.delimiter,
             Marker: params.key_marker,
             MaxKeys: params.limit,
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.list_objects:', this.bucket, inspect(params), 'list', inspect(res));
 
@@ -117,7 +121,7 @@ class NamespaceS3 {
             KeyMarker: params.key_marker,
             UploadIdMarker: params.upload_id_marker,
             MaxUploads: params.limit,
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.list_uploads:', this.bucket, inspect(params), 'list', inspect(res));
 
@@ -142,7 +146,7 @@ class NamespaceS3 {
             KeyMarker: params.key_marker,
             VersionIdMarker: params.version_id_marker,
             MaxKeys: params.limit,
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.list_object_versions:', this.bucket, inspect(params), 'list', inspect(res));
 
@@ -183,7 +187,7 @@ class NamespaceS3 {
         dbg.log0('NamespaceS3.read_object_md:', this.bucket, inspect(params));
         await this._prepare_sts_client();
 
-        /** @type {AWS.S3.HeadObjectRequest | AWS.S3.GetObjectRequest} */
+        /** @type {import("@aws-sdk/client-s3").HeadObjectRequest | import("@aws-sdk/client-s3").GetObjectRequest} */
         const request = {
             Bucket: this.bucket,
             Key: params.key,
@@ -203,16 +207,18 @@ class NamespaceS3 {
             let res;
             try {
                 res = can_use_get_inline ?
-                    await this.s3.getObject(request).promise() :
-                    await this.s3.headObject(request).promise();
+                    await this.s3.getObject(request) :
+                    await this.s3.headObject(request);
             } catch (err) {
-                // catch invalid range error for objects of size 0 and trying head object instead
-                if (err.code !== 'InvalidRange') {
+                // catch invalid range error for objects of size 0 and try head object instead
+                const httpCode = err?.$metadata?.httpStatusCode;
+                const isInvalidRange = err?.name === 'InvalidRange' || httpCode === 416;
+                if (!isInvalidRange) {
                     throw err;
                 }
-                res = await this.s3.headObject({ ...request, Range: undefined }).promise();
+                res = await this.s3.headObject({ ...request, Range: undefined });
             }
-            dbg.log0('NamespaceS3.read_object_md:', this.bucket, inspect(params), 'res', inspect(res));
+            dbg.log0('NamespaceS3.read_object_md:', this.bucket, inspect(params), 'metadata', inspect(res.$metadata));
             return this._get_s3_object_info(res, params.bucket, params.part_number);
         } catch (err) {
             this._translate_error_code(params, err);
@@ -240,51 +246,44 @@ class NamespaceS3 {
         dbg.log0('NamespaceS3.read_object_stream:', this.bucket, inspect(_.omit(params, 'object_md.ns')));
         await this._prepare_sts_client();
 
-        return new Promise((resolve, reject) => {
-            /** @type {AWS.S3.HeadObjectRequest & AWS.S3.GetObjectRequest | AWS.S3.CopyObjectRequest} */
-            const request = {
-                Bucket: this.bucket,
-                Key: params.key,
-                Range: params.end ? `bytes=${params.start}-${params.end - 1}` : undefined,
-                PartNumber: params.part_number,
-            };
-            this._set_md_conditions(params, request);
-            this._assign_encryption_to_request(params, request);
-            const req = this.s3.getObject(request)
-                .on('error', err => {
-                    this._translate_error_code(params, err);
-                    dbg.warn('NamespaceS3.read_object_stream:', inspect(err));
-                    reject(err);
-                })
-                .on('httpHeaders', (statusCode, headers, res) => {
-                    dbg.log0('NamespaceS3.read_object_stream:',
+        /** @type {import("@aws-sdk/client-s3").HeadObjectRequest & import("@aws-sdk/client-s3").GetObjectRequest | import("@aws-sdk/client-s3").CopyObjectRequest} */
+        const request = {
+            Bucket: this.bucket,
+            Key: params.key,
+            Range: params.end ? `bytes=${params.start}-${params.end - 1}` : undefined,
+            PartNumber: params.part_number,
+        };
+        this._set_md_conditions(params, request);
+        this._assign_encryption_to_request(params, request);
+        try {
+            const obj_out = await this.s3.getObject(request);
+            dbg.log0('NamespaceS3.read_object_stream:',
                 this.bucket, inspect(_.omit(params, 'object_md.ns')),
-                        'statusCode', statusCode,
-                        'headers', headers
             );
-                    if (statusCode >= 300) return; // will be handled by error event
-                    req.removeListener('httpData', AWS.EventListeners.Core.HTTP_DATA);
-                    req.removeListener('httpError', AWS.EventListeners.Core.HTTP_ERROR);
-                    let count = 1;
-                    // on s3 read_object_md might not return x-amz-tagging-count header, so we get it here
-                    params.tag_count = headers['x-amz-tagging-count'];
-                    const count_stream = stream_utils.get_tap_stream(data => {
-                        this.stats?.update_namespace_read_stats({
-                            namespace_resource_id: this.namespace_resource_id,
-                            bucket_name: params.bucket,
-                            size: data.length,
-                            count
-                        });
-                        // clear count for next updates
-                        count = 0;
-                    });
-                    const read_stream = /** @type {import('stream').Readable} */
-                        (res.httpResponse.createUnbufferedStream());
-                    return resolve(read_stream.pipe(count_stream));
+            // In v3 a non-2xx response normally throws, so this guard is only a safety net.
+            if (obj_out.$metadata.httpStatusCode >= 300) throw new Error(`S3 getObject failed with status ${obj_out.$metadata.httpStatusCode}`);
+            let count = 1;
+            // on s3 read_object_md might not return x-amz-tagging-count, so take the v3 TagCount here
+            params.tag_count = obj_out.TagCount;
+            const count_stream = stream_utils.get_tap_stream(data => {
+                this.stats?.update_namespace_read_stats({
+                    namespace_resource_id: this.namespace_resource_id,
+                    bucket_name: params.bucket,
+                    size: data.length,
+                    count
                 });
-            req.send();
-        });
+                // clear count for next updates
+                count = 0;
+            });
+            const read_stream = /** @type {import('stream').Readable} */ (obj_out.Body);
+            // Return a live stream to be piped by the caller (endpoint)
+            return read_stream.pipe(count_stream);
+        } catch (err) {
+            this._translate_error_code(params, err);
+            dbg.warn('NamespaceS3.read_object_stream:', inspect(err));
+            throw err;
+        }
     }
@@ -305,7 +304,7 @@ class NamespaceS3 {
                 throw new Error('NamespaceS3.upload_object: CopySourceRange not supported by s3.copyObject()');
             }
 
-            /** @type {AWS.S3.CopyObjectRequest} */
+            /** @type {import("@aws-sdk/client-s3").CopyObjectRequest} */
             const request = {
                 Bucket: this.bucket,
                 Key: params.key,
@@ -320,7 +319,7 @@ class NamespaceS3 {
 
             this._assign_encryption_to_request(params, request);
 
-            res = await this.s3.copyObject(request).promise();
+            res = await this.s3.copyObject(request);
         } else {
             let count = 1;
             const count_stream = stream_utils.get_tap_stream(data => {
@@ -334,7 +333,7 @@ class NamespaceS3 {
                 count = 0;
             });
 
-            /** @type {AWS.S3.PutObjectRequest} */
+            /** @type {import("@aws-sdk/client-s3").PutObjectRequest} */
             const request = {
                 Bucket: this.bucket,
                 Key: params.key,
@@ -345,10 +344,9 @@ class NamespaceS3 {
                 Metadata: params.xattr,
                 Tagging,
             };
-
             this._assign_encryption_to_request(params, request);
             try {
-                res = await this.s3.putObject(request).promise();
+                res = await this.s3.putObject(request);
             } catch (err) {
                 object_sdk.rpc_client.pool.update_issues_report({
                     namespace_resource_id: this.namespace_resource_id,
@@ -359,8 +357,15 @@ class NamespaceS3 {
             }
         }
 
         dbg.log0('NamespaceS3.upload_object:', this.bucket, inspect(params), 'res', inspect(res));
-        const etag = s3_utils.parse_etag(res.ETag);
-        const last_modified_time = s3_utils.get_http_response_date(res);
+        const etag = s3_utils.parse_etag(res.ETag || res.CopyObjectResult?.ETag);
+        /** @type {import("@aws-sdk/client-s3").HeadObjectRequest} */
+        const request = {
+            Bucket: this.bucket,
+            Key: params.key,
+            VersionId: params.version_id,
+        };
+        const res_head = await this.s3.headObject(request);
+        const last_modified_time = s3_utils.get_http_response_date(res_head);
         return { etag, version_id: res.VersionId, last_modified_time };
     }
 
@@ -389,7 +394,7 @@ class NamespaceS3 {
         await this._prepare_sts_client();
 
         const Tagging = params.tagging && params.tagging.map(tag => tag.key + '=' + tag.value).join('&');
-        /** @type {AWS.S3.CreateMultipartUploadRequest} */
+        /** @type {import("@aws-sdk/client-s3").CreateMultipartUploadRequest} */
         const request = {
             Bucket: this.bucket,
             Key: params.key,
@@ -399,7 +404,7 @@ class NamespaceS3 {
             Tagging
         };
         this._assign_encryption_to_request(params, request);
-        const res = await this.s3.createMultipartUpload(request).promise();
+        const res = await this.s3.createMultipartUpload(request);
 
         dbg.log0('NamespaceS3.create_object_upload:', this.bucket, inspect(params), 'res', inspect(res));
         return { obj_id: res.UploadId };
@@ -410,10 +415,11 @@ class NamespaceS3 {
         await this._prepare_sts_client();
 
         let res;
+        let etag;
         if (params.copy_source) {
             const { copy_source, copy_source_range } = s3_utils.format_copy_source(params.copy_source);
 
-            /** @type {AWS.S3.UploadPartCopyRequest} */
+            /** @type {import("@aws-sdk/client-s3").UploadPartCopyRequest} */
             const request = {
                 Bucket: this.bucket,
                 Key: params.key,
@@ -425,7 +431,9 @@ class NamespaceS3 {
 
             this._assign_encryption_to_request(params, request);
 
-            res = await this.s3.uploadPartCopy(request).promise();
+            res = await this.s3.uploadPartCopy(request);
+            dbg.log0('NamespaceS3.upload_multipart uploadPartCopy:', this.bucket, inspect(params), 'res', inspect(res));
+            etag = s3_utils.parse_etag(res.CopyPartResult.ETag);
         } else {
             let count = 1;
             const count_stream = stream_utils.get_tap_stream(data => {
@@ -438,7 +446,7 @@ class NamespaceS3 {
                 count = 0;
             });
 
-            /** @type {AWS.S3.UploadPartRequest} */
+            /** @type {import("@aws-sdk/client-s3").UploadPartRequest} */
             const request = {
                 Bucket: this.bucket,
                 Key: params.key,
@@ -451,7 +459,7 @@ class NamespaceS3 {
 
             this._assign_encryption_to_request(params, request);
             try {
-                res = await this.s3.uploadPart(request).promise();
+                res = await this.s3.uploadPart(request);
             } catch (err) {
                 object_sdk.rpc_client.pool.update_issues_report({
                     namespace_resource_id: this.namespace_resource_id,
@@ -460,24 +468,24 @@ class NamespaceS3 {
                 });
                 throw err;
             }
+            dbg.log0('NamespaceS3.upload_multipart uploadPart:', this.bucket, inspect(params), 'res', inspect(res));
+            etag = s3_utils.parse_etag(res.ETag);
         }
-        dbg.log0('NamespaceS3.upload_multipart:', this.bucket, inspect(params), 'res', inspect(res));
-        const etag = s3_utils.parse_etag(res.ETag);
         return { etag };
     }
 
     async list_multiparts(params, object_sdk) {
         dbg.log0('NamespaceS3.list_multiparts:', this.bucket, inspect(params));
         await this._prepare_sts_client();
-
-        const res = await this.s3.listParts({
+        /** @type {import("@aws-sdk/client-s3").ListPartsRequest} */
+        const req = {
             Bucket: this.bucket,
             Key: params.key,
             UploadId: params.obj_id,
             MaxParts: params.max,
-            PartNumberMarker: params.num_marker,
-        }).promise();
-
+            PartNumberMarker: params.num_marker?.toString(),
+        };
+        const res = await this.s3.listParts(req);
         dbg.log0('NamespaceS3.list_multiparts:', this.bucket, inspect(params), 'res', inspect(res));
         return {
             is_truncated: res.IsTruncated,
@@ -505,7 +513,7 @@ class NamespaceS3 {
                     ETag: `"${p.etag}"`,
                 }))
             }
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.complete_object_upload:', this.bucket, inspect(params), 'res', inspect(res));
         const etag = s3_utils.parse_etag(res.ETag);
@@ -520,7 +528,7 @@ class NamespaceS3 {
             Bucket: this.bucket,
             Key: params.key,
             UploadId: params.obj_id,
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.abort_object_upload:', this.bucket, inspect(params), 'res', inspect(res));
     }
@@ -543,7 +551,7 @@ class NamespaceS3 {
             Key: params.key,
             VersionId: params.version_id,
             Tagging: { TagSet }
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.put_object_tagging:', this.bucket, inspect(params), 'res', inspect(res));
 
@@ -560,7 +568,7 @@ class NamespaceS3 {
             Bucket: this.bucket,
             Key: params.key,
             VersionId: params.version_id
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.delete_object_tagging:', this.bucket, inspect(params), 'res', inspect(res));
 
@@ -577,7 +585,7 @@ class NamespaceS3 {
             Bucket: this.bucket,
             Key: params.key,
             VersionId: params.version_id
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.get_object_tagging:', this.bucket, inspect(params), 'res', inspect(res));
 
@@ -604,7 +612,7 @@ class NamespaceS3 {
             Bucket: this.bucket,
             Key: params.key,
             VersionId: params.version_id
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.get_object_acl:', this.bucket, inspect(params), 'res', inspect(res));
 
@@ -623,7 +631,7 @@ class NamespaceS3 {
             Key: params.key,
             VersionId: params.version_id,
             ACL: params.acl
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.put_object_acl:', this.bucket, inspect(params), 'res', inspect(res));
     }
@@ -640,7 +648,7 @@ class NamespaceS3 {
             Bucket: this.bucket,
             Key: params.key,
             VersionId: params.version_id,
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.delete_object:',
             this.bucket,
@@ -672,7 +680,7 @@ class NamespaceS3 {
                     VersionId: obj.version_id,
                 }))
             }
-        }).promise();
+        });
 
         dbg.log0('NamespaceS3.delete_multiple_objects:',
             this.bucket,
@@ -753,7 +761,7 @@ class NamespaceS3 {
         dbg.log0('NamespaceS3.get_object_attributes:', this.bucket, inspect(params));
         await this._prepare_sts_client();
 
-        /** @type {AWS.S3.GetObjectAttributesRequest} */
+        /** @type {import("@aws-sdk/client-s3").GetObjectAttributesRequest} */
         const request = {
             Bucket: this.bucket,
             Key: params.key,
@@ -763,7 +771,7 @@ class NamespaceS3 {
         this._set_md_conditions(params, request);
         this._assign_encryption_to_request(params, request);
         try {
-            const res = await this.s3.getObjectAttributes(request).promise();
+            const res = await this.s3.getObjectAttributes(request);
             dbg.log0('NamespaceS3.get_object_attributes:', this.bucket, inspect(params), 'res', inspect(res));
             return this._get_s3_object_info(res, params.bucket);
         } catch (err) {
@@ -789,12 +797,12 @@
     /**
      *
      * @param {Omit, 'ChecksumAlgorithm'>} res
      * @param {string} bucket
      * @param {number} [part_number]
@@ -806,7 +814,7 @@ class NamespaceS3 {
             'noobaa-namespace-s3-bucket': this.bucket,
         });
         const ranges = res.ContentRange ? Number(res.ContentRange.split('/')[1]) : 0;
-        const size = ranges || res.ContentLength || res.Size || 0;
+        const size = ranges || res.ContentLength || res.Size || res.ObjectSize || 0;
         const last_modified_time = res.LastModified ? res.LastModified.getTime() : Date.now();
         return {
             obj_id: res.UploadId || etag,
diff --git a/src/sdk/object_sdk.js b/src/sdk/object_sdk.js
index 8d65186a39..9044241cb6 100644
--- a/src/sdk/object_sdk.js
+++ b/src/sdk/object_sdk.js
@@ -25,6 +25,7 @@ const NamespaceMultipart = require('./namespace_multipart');
 const NamespaceNetStorage = require('./namespace_net_storage');
 const BucketSpaceNB = require('./bucketspace_nb');
 const { RpcError } = require('../rpc');
+const noobaa_s3_client = require('../sdk/noobaa_s3_client/noobaa_s3_client');
 
 const anonymous_access_key = Symbol('anonymous_access_key');
 
@@ -457,25 +458,22 @@
             r.endpoint_type === 'FLASHBLADE' ||
             r.endpoint_type === 'IBM_COS') {
 
-            const agent = r.endpoint_type === 'AWS' ?
-                http_utils.get_default_agent(r.endpoint) :
-                http_utils.get_unsecured_agent(r.endpoint);
-
             return new NamespaceS3({
                 namespace_resource_id: r.id,
                 s3_params: {
-                    params: { Bucket: r.target_bucket },
                     endpoint: r.endpoint,
                     aws_sts_arn: r.aws_sts_arn,
-                    accessKeyId: r.access_key.unwrap(),
-                    secretAccessKey: r.secret_key.unwrap(),
-                    // region: 'us-east-1', // TODO needed?
-                    signatureVersion: cloud_utils.get_s3_endpoint_signature_ver(r.endpoint, r.auth_method),
-                    s3ForcePathStyle: true,
-                    // computeChecksums: false, // disabled by default for performance
-                    httpOptions: { agent },
-                    access_mode: r.access_mode
+                    credentials: {
+                        accessKeyId: r.access_key.unwrap(),
+                        secretAccessKey: r.secret_key.unwrap(),
+                    },
+                    region: r.region || config.DEFAULT_REGION, // SDKv3 needs region
+                    forcePathStyle: true,
+                    requestHandler: noobaa_s3_client.get_requestHandler_with_suitable_agent(r.endpoint),
+                    requestChecksumCalculation: 'WHEN_REQUIRED',
+                    access_mode: r.access_mode,
                 },
+                bucket: r.target_bucket,
                 stats: this.stats,
             });
         }
diff --git a/src/test/integration_tests/api/s3/test_s3_ops.js b/src/test/integration_tests/api/s3/test_s3_ops.js
index 7f939b86ad..5fe5c19032 100644
--- a/src/test/integration_tests/api/s3/test_s3_ops.js
+++ b/src/test/integration_tests/api/s3/test_s3_ops.js
@@ -71,7 +71,6 @@ mocha.describe('s3_ops', function() {
     mocha.before(async function() {
         const self = this;
         self.timeout(60000);
-        const account_info = await rpc_client.account.read_account({ email: EMAIL, });
         s3_client_params = {
             endpoint: coretest.get_http_address(),
diff --git a/src/test/unit_tests/util_functions_tests/test_cloud_utils.js b/src/test/unit_tests/util_functions_tests/test_cloud_utils.js
index e3e1d9ff80..1903ec691b 100644
--- a/src/test/unit_tests/util_functions_tests/test_cloud_utils.js
+++ b/src/test/unit_tests/util_functions_tests/test_cloud_utils.js
@@ -4,7 +4,6 @@
 const mocha = require('mocha');
 const assert = require('assert');
 const sinon = require('sinon');
-const AWS = require('aws-sdk');
 const cloud_utils = require('../../../util/cloud_utils');
 const dbg = require('../../../util/debug_module')(__filename);
 const { STSClient } = require('@aws-sdk/client-sts');
@@ -15,71 +14,7 @@ const fakeAccessKeyId = "fakeAccessKeyId";
 const fakeSecretAccessKey = "fakeSecretAccessKey";
 const fakeSessionToken = "fakeSessionToken";
 const roleArn = "arn:aws:iam::261532230807:role/noobaa_s3_sts";
-const defaultSTSCredsValidity = 3600;
 const REGION = "us-east-1";
-const expectedParams = [{
-    RoleArn: roleArn,
-    RoleSessionName: 'testSession',
-    WebIdentityToken: 'web-identity-token',
-    DurationSeconds: defaultSTSCredsValidity,
-}];
-
-mocha.describe('AWS STS tests', function() {
-    let STSStub;
-    let stsFake;
-    mocha.before('Creating STS stub', function() {
-
-        sinon.stub(fs.promises, "readFile")
"readFile") - .withArgs(projectedServiceAccountToken) - .returns("web-identity-token"); - - stsFake = { - assumeRoleWithWebIdentity: sinon.stub().returnsThis(), - promise: sinon.stub() - .resolves({ - Credentials: { - AccessKeyId: fakeAccessKeyId, - SecretAccessKey: fakeSecretAccessKey, - SessionToken: fakeSessionToken - } - }), - }; - STSStub = sinon.stub(AWS, 'STS') - .callsFake(() => stsFake); - }); - mocha.after('Restoring STS stub', function() { - STSStub.restore(); - fs.promises.readFile.restore?.(); - }); - mocha.it('should generate aws sts creds', async function() { - const params = { - aws_sts_arn: roleArn - }; - const roleSessionName = "testSession"; - const json = await cloud_utils.generate_aws_sts_creds(params, roleSessionName); - sinon.assert.calledOnce(STSStub); - sinon.assert.calledWith(stsFake.assumeRoleWithWebIdentity, ...expectedParams); - assert.equal(json.accessKeyId, fakeAccessKeyId); - assert.equal(json.secretAccessKey, fakeSecretAccessKey); - assert.equal(json.sessionToken, fakeSessionToken); - dbg.log0('test.aws.sts.assumeRoleWithWebIdentity: ', json); - }); - mocha.it('should generate an STS S3 client', async function() { - const params = { - aws_sts_arn: roleArn, - region: 'us-east-1' - }; - const additionalParams = { - RoleSessionName: 'testSession' - }; - const s3 = await cloud_utils.createSTSS3Client(params, additionalParams); - dbg.log0('test.aws.sts.createSTSS3Client: ', s3); - assert.equal(s3.config.credentials.accessKeyId, fakeAccessKeyId); - assert.equal(s3.config.credentials.secretAccessKey, fakeSecretAccessKey); - assert.equal(s3.config.credentials.sessionToken, fakeSessionToken); - assert.equal(s3.config.region, 'us-east-1'); - }); -}); mocha.describe('AWS STS SDK V3 tests', function() { let sts_v3_stub; diff --git a/src/util/cloud_utils.js b/src/util/cloud_utils.js index 59cbe3fd9d..4ac25c1f5c 100644 --- a/src/util/cloud_utils.js +++ b/src/util/cloud_utils.js @@ -31,38 +31,6 @@ function find_cloud_connection(account, conn_name) { return conn; } -async function createSTSS3Client(params, additionalParams) { - const creds = await generate_aws_sts_creds(params, additionalParams.RoleSessionName); - return new AWS.S3({ - credentials: creds, - region: params.region, - endpoint: additionalParams.endpoint, - signatureVersion: additionalParams.signatureVersion, - s3DisableBodySigning: additionalParams.s3DisableBodySigning, - httpOptions: additionalParams.httpOptions, - s3ForcePathStyle: additionalParams.s3ForcePathStyle - }); -} - -async function generate_aws_sts_creds(params, roleSessionName) { - const sts = new AWS.STS(); - const creds = await (sts.assumeRoleWithWebIdentity({ - RoleArn: params.aws_sts_arn, - RoleSessionName: roleSessionName || defaultRoleSessionName, - WebIdentityToken: (await fs.promises.readFile(projectedServiceAccountToken)).toString(), - DurationSeconds: defaultSTSCredsValidity - }).promise()); - if (_.isEmpty(creds.Credentials)) { - dbg.error(`AWS STS empty creds ${params.RoleArn}, RolesessionName: ${params.RoleSessionName},Projected service Account Token Path : ${projectedServiceAccountToken}`); - throw new RpcError('AWS_STS_ERROR', 'Empty AWS STS creds retrieved for Role "' + params.RoleArn + '"'); - } - return new AWS.Credentials( - creds.Credentials.AccessKeyId, - creds.Credentials.SecretAccessKey, - creds.Credentials.SessionToken - ); -} - async function createSTSS3SDKv3Client(params, additionalParams) { const creds = await generate_aws_sdkv3_sts_creds(params, additionalParams.RoleSessionName); return new S3({ @@ -247,8 +215,6 @@ 
 exports.is_aws_endpoint = is_aws_endpoint;
 exports.disable_s3_compatible_bodysigning = disable_s3_compatible_bodysigning;
 exports.set_noobaa_s3_connection = set_noobaa_s3_connection;
-exports.createSTSS3Client = createSTSS3Client;
-exports.generate_aws_sts_creds = generate_aws_sts_creds;
 exports.generate_access_keys = generate_access_keys;
 exports.createSTSS3SDKv3Client = createSTSS3SDKv3Client;
 exports.generate_aws_sdkv3_sts_creds = generate_aws_sdkv3_sts_creds;
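
Reviewer note (not part of the patch): the v2-to-v3 calling conventions this migration relies on - no .promise(), Body arriving as a Node Readable stream, and errors carrying name/$metadata.httpStatusCode instead of err.code - can be sanity-checked in isolation. A minimal sketch, assuming only @aws-sdk/client-s3 is installed; the bucket and key values are placeholders and are not taken from this patch:

// Minimal AWS SDK v3 sketch mirroring the call pattern used in namespace_s3.js.
// Assumes @aws-sdk/client-s3 is installed; 'example-bucket' / 'example-key' are hypothetical.
'use strict';
const { S3 } = require('@aws-sdk/client-s3');

async function main() {
    const s3 = new S3({
        region: 'us-east-1',
        forcePathStyle: true, // replaces v2's s3ForcePathStyle
    });
    try {
        // v3 aggregated client: the call resolves directly, no .promise()
        const res = await s3.getObject({ Bucket: 'example-bucket', Key: 'example-key' });
        console.log('status:', res.$metadata.httpStatusCode, 'etag:', res.ETag);
        // In Node.js res.Body is already a stream.Readable, so it can be piped as-is
        res.Body.pipe(process.stdout);
    } catch (err) {
        // v3 errors expose err.name and err.$metadata instead of v2's err.code
        if (err.name === 'NoSuchKey' || err.$metadata?.httpStatusCode === 404) {
            console.log('object not found');
        } else {
            throw err;
        }
    }
}

main();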