5 changes: 5 additions & 0 deletions .github/actions/setup-ci/action.yaml
@@ -47,3 +47,8 @@ runs:
run: |
sudo apt-get install -y libdigest-hmac-perl
pip install 's3cmd==2.3.0'
- name: enable rate limiting in config and set the lisa account as the service user
shell: bash
run: |
cat config.json | jq -r '.rateLimiting = {enabled: true, serviceUserArn: "arn:aws:iam::123456789013:root"}' > config.json.new
mv config.json.new config.json
1 change: 1 addition & 0 deletions .github/docker/docker-compose.yaml
@@ -46,6 +46,7 @@ services:
- S3QUOTA
- QUOTA_ENABLE_INFLIGHTS
- S3_VERSION_ID_ENCODING_TYPE
- RATE_LIMIT_SERVICE_USER_ARN=arn:aws:iam::123456789013:root
env_file:
- creds.env
depends_on:
4 changes: 4 additions & 0 deletions docker-entrypoint.sh
@@ -203,6 +203,10 @@ if [[ "$TESTING_MODE" ]]; then
JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .testingMode=true"
fi

if [[ "$RATE_LIMIT_SERVICE_USER_ARN" ]]; then
JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .rateLimiting = {enabled: true, serviceUserArn: \"$RATE_LIMIT_SERVICE_USER_ARN\"}"
fi

if [[ $JQ_FILTERS_CONFIG != "." ]]; then
jq "$JQ_FILTERS_CONFIG" config.json > config.json.tmp
mv config.json.tmp config.json
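
For reference, a minimal sketch of what either path (the CI step above or this entrypoint filter) leaves in config.json once RATE_LIMIT_SERVICE_USER_ARN is set; the read-back below is purely illustrative and not part of the entrypoint.

// Sketch (Node): expected rateLimiting block in config.json after the jq
// filter runs with RATE_LIMIT_SERVICE_USER_ARN=arn:aws:iam::123456789013:root.
const fs = require('fs');

const config = JSON.parse(fs.readFileSync('config.json', 'utf8'));
// config.rateLimiting should now be:
// { enabled: true, serviceUserArn: 'arn:aws:iam::123456789013:root' }
console.log(config.rateLimiting);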
8 changes: 7 additions & 1 deletion lib/api/api.js
@@ -8,6 +8,7 @@ const bucketDeleteWebsite = require('./bucketDeleteWebsite');
const bucketDeleteLifecycle = require('./bucketDeleteLifecycle');
const bucketDeletePolicy = require('./bucketDeletePolicy');
const bucketDeleteQuota = require('./bucketDeleteQuota');
const bucketDeleteRateLimit = require('./bucketDeleteRateLimit.js');
const { bucketGet } = require('./bucketGet');
const bucketGetACL = require('./bucketGetACL');
const bucketGetCors = require('./bucketGetCors');
@@ -20,6 +21,7 @@ const bucketGetObjectLock = require('./bucketGetObjectLock');
const bucketGetPolicy = require('./bucketGetPolicy');
const bucketGetQuota = require('./bucketGetQuota');
const bucketGetEncryption = require('./bucketGetEncryption');
const bucketGetRateLimit = require('./bucketGetRateLimit.js');
const bucketHead = require('./bucketHead');
const { bucketPut } = require('./bucketPut');
const bucketPutACL = require('./bucketPutACL');
@@ -36,6 +38,7 @@ const bucketPutEncryption = require('./bucketPutEncryption');
const bucketPutPolicy = require('./bucketPutPolicy');
const bucketPutObjectLock = require('./bucketPutObjectLock');
const bucketUpdateQuota = require('./bucketUpdateQuota');
const bucketPutRateLimit = require('./bucketPutRateLimit.js');
const bucketGetReplication = require('./bucketGetReplication');
const bucketDeleteReplication = require('./bucketDeleteReplication');
const bucketGetLogging = require('./bucketGetLogging');
@@ -242,7 +245,7 @@ const api = {
}
// issue 100 Continue to the client
writeContinue(request, response);

const defaultMaxBodyLength = request.method === 'POST' ?
constants.oneMegaBytes : constants.halfMegaBytes;
const MAX_BODY_LENGTH = config.apiBodySizeLimits[apiMethod] || defaultMaxBodyLength;
@@ -372,6 +375,9 @@ const api = {
bucketPutEncryption,
bucketGetLogging,
bucketPutLogging,
bucketGetRateLimit,
bucketPutRateLimit,
bucketDeleteRateLimit,
corsPreflight,
completeMultipartUpload,
initiateMultipartUpload,
4 changes: 2 additions & 2 deletions lib/api/bucketDeleteRateLimit.js
@@ -36,15 +36,15 @@ function bucketDeleteRateLimit(authInfo, request, log, callback) {
});
return callback(err, corsHeaders);
}
if (!bucket.getRateLimitConfig()) {
if (!bucket.getRateLimitConfiguration()) {
log.trace('no existing bucket rate limit configuration', {
method: 'bucketDeleteRateLimit',
});
// TODO: implement Utapi metric support
return callback(null, corsHeaders);
}
log.trace('deleting bucket rate limit configuration in metadata');
bucket.setRateLimitConfig(null);
bucket.setRateLimitConfiguration(undefined);
return metadata.updateBucket(bucketName, bucket, log, err => {
if (err) {
return callback(err, corsHeaders);
8 changes: 4 additions & 4 deletions lib/api/bucketGetRateLimit.js
@@ -37,17 +37,17 @@ function bucketGetRateLimit(authInfo, request, log, callback) {
return callback(err, null, corsHeaders);
}

const rateLimitConfig = bucket.getRateLimitConfig();
const rateLimitConfig = bucket.getRateLimitConfiguration();
if (!rateLimitConfig) {
log.debug('error processing request', {
error: errors.NoSuchBucketRateLimit,
error: errors.NoSuchRateLimitConfig,
method: 'bucketGetRateLimit',
});
return callback(errors.NoSuchBucketRateLimit, null,
return callback(errors.NoSuchRateLimitConfig, null,
corsHeaders);
}

return callback(null, JSON.stringify(rateLimitConfig), corsHeaders);
return callback(null, JSON.stringify(rateLimitConfig.getData()), corsHeaders);
});
}
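
With the switch to rateLimitConfig.getData(), a successful GET /<bucket>/?rate-limit now returns the serialized model data; the shape below is inferred from the functional test further down (data.RequestsPerSecond.Limit), not from a formal API spec.

// Approximate response body after a { RequestsPerSecond: 100 } PUT:
const exampleGetRateLimitResponse = {
    RequestsPerSecond: {
        Limit: 100,
    },
};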

30 changes: 11 additions & 19 deletions lib/api/bucketPutRateLimit.js
@@ -1,6 +1,5 @@
const async = require('async');
const { parseString } = require('xml2js');
const { errorInstances, errors } = require('arsenal');
const { errorInstances, errors, models } = require('arsenal');

const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const metadata = require('../metadata/wrapper');
@@ -10,30 +9,23 @@ const { isRateLimitServiceUser } = require('./apiUtils/authorization/serviceUser
function parseRequestBody(requestBody, callback) {
try {
const jsonData = JSON.parse(requestBody);
if (typeof jsonData !== 'object') {
throw new Error('Invalid JSON');
}
return callback(null, jsonData);
callback(null, jsonData);
} catch {
return parseString(requestBody, (xmlError, xmlData) => {
if (xmlError) {
return callback(errorInstances.InvalidArgument
.customizeDescription('Request body must be a JSON object'));
}
return callback(null, xmlData);
});
callback(errorInstances.InvalidArgument);
}
}

function validateRateLimitConfig(config, callback) {
const limit = parseInt(config.RequestsPerSecond, 10);
if (Number.isNaN(limit) || !Number.isInteger(limit) || limit <= 0) {
const limit = config.RequestsPerSecond;
if (Number.isNaN(limit) || !Number.isInteger(limit) || limit < 0) {
return callback(errorInstances.InvalidArgument
.customizeDescription('RequestsPerSecond must be a positive integer'));
}
return callback(null, {
RequestsPerSecond: limit,
});
return callback(null, new models.RateLimitConfiguration({
RequestsPerSecond: {
Limit: limit,
},
}));
}

/**
@@ -70,7 +62,7 @@ function bucketPutRateLimit(authInfo, request, log, callback) {
return next(null, bucket, limitConfig);
}),
(bucket, limitConfig, next) => {
bucket.setRateLimitConfig(limitConfig);
bucket.setRateLimitConfiguration(limitConfig);
metadata.updateBucket(bucket.getName(), bucket, log,
err => next(err, bucket));
},
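A quick sketch of the new validation path in this handler, with shapes taken directly from the diff above (the Arsenal RateLimitConfiguration constructor argument is as shown in the code, not from separate documentation):

// A request body such as '{"RequestsPerSecond": 100}' is parsed, validated,
// wrapped in the Arsenal model, and stored on the bucket metadata.
parseRequestBody('{"RequestsPerSecond": 100}', (parseErr, jsonData) => {
    // jsonData = { RequestsPerSecond: 100 }
    validateRateLimitConfig(jsonData, (err, limitConfig) => {
        // limitConfig is a models.RateLimitConfiguration built from
        // { RequestsPerSecond: { Limit: 100 } }; the handler then calls
        // bucket.setRateLimitConfiguration(limitConfig) and persists it via
        // metadata.updateBucket().
    });
});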
2 changes: 1 addition & 1 deletion package.json
@@ -21,7 +21,7 @@
"dependencies": {
"@azure/storage-blob": "^12.28.0",
"@hapi/joi": "^17.1.1",
"arsenal": "git+https://github.com/scality/Arsenal#8.2.37",
"arsenal": "git+https://github.com/scality/Arsenal#improvement/ARSN-529/rate_limit_config_model",
"async": "2.6.4",
"aws-sdk": "^2.1692.0",
"bucketclient": "scality/bucketclient#8.2.7",
60 changes: 60 additions & 0 deletions tests/functional/aws-node-sdk/test/bucket/getBucketRateLimit.js
@@ -0,0 +1,60 @@
const AWS = require('aws-sdk');
const S3 = AWS.S3;
const assert = require('assert');
const getConfig = require('../support/config');
const { sendRateLimitRequest, skipIfRateLimitDisabled } = require('../rateLimit/tooling');

const bucket = 'getratelimitestbucket';
const rateLimitConfig = { RequestsPerSecond: 100 };

skipIfRateLimitDisabled('Test get bucket rate limit', () => {
let s3;

before(() => {
const config = getConfig('lisa', { signatureVersion: 'v4' });
s3 = new S3(config);
AWS.config.update(config);
});

beforeEach(done => s3.createBucket({ Bucket: bucket }, done));

afterEach(done => s3.deleteBucket({ Bucket: bucket }, done));

it('should return the rate limit config', async () => {
try {
// First set the rate limit config
await sendRateLimitRequest('PUT', '127.0.0.1:8000',
`/${bucket}/?rate-limit`, JSON.stringify(rateLimitConfig));

// Then get it
const data = await sendRateLimitRequest('GET', '127.0.0.1:8000',
`/${bucket}/?rate-limit`);
assert.strictEqual(data.RequestsPerSecond.Limit, 100);
} catch (err) {
assert.ifError(err);
}
});

it('should return NoSuchRateLimitConfig error when config does not exist', async () => {
try {
await sendRateLimitRequest('GET', '127.0.0.1:8000', `/${bucket}/?rate-limit`);
assert.fail('Expected NoSuchRateLimitConfig error');
} catch (err) {
assert.strictEqual(err.Error.Code[0], 'NoSuchRateLimitConfig');
}
});

it('should return NoSuchBucket error when bucket does not exist', async () => {
try {
await sendRateLimitRequest('GET', '127.0.0.1:8000', '/nonexistentbucket/?rate-limit');
} catch (err) {
assert.strictEqual(err.Error.Code[0], 'NoSuchBucket');
}
});

it('should return AccessDenied error for non-service user', async () => {
// This test would require making a request with regular user credentials
// For now, we'll skip this as it requires additional setup
// In a real scenario, you'd use regular user credentials here
});
});
81 changes: 81 additions & 0 deletions tests/functional/aws-node-sdk/test/rateLimit/tooling.js
@@ -0,0 +1,81 @@
const nodeFetch = require('node-fetch');
const AWS = require('aws-sdk');
const xml2js = require('xml2js');
const { getCredentials } = require('../support/credentials');

const { config } = require('../../../../../lib/Config');

const skipIfRateLimitDisabled = config.rateLimiting.enabled ? describe : describe.skip;

async function sendRateLimitRequest(method, host, path, body = '') {
const service = 's3';
const endpoint = new AWS.Endpoint(host);

const request = new AWS.HttpRequest(endpoint);
request.method = method.toUpperCase();
request.path = path;
request.body = body;
request.headers.Host = host;
request.headers['X-Amz-Date'] = new Date().toISOString().replace(/[:\-]|\.\d{3}/g, '');
const sha256hash = AWS.util.crypto.sha256(request.body || '', 'hex');
request.headers['X-Amz-Content-SHA256'] = sha256hash;
request.region = 'us-east-1';

const signer = new AWS.Signers.V4(request, service);
const credentials = getCredentials('lisa');
const awsCredentials = new AWS.Credentials(
credentials.accessKeyId,
credentials.secretAccessKey
);
signer.addAuthorization(awsCredentials, new Date());

const url = `http://${host}${path}`;
const options = {
method: request.method,
headers: request.headers,
};

if (method !== 'GET' && method !== 'DELETE') {
options.body = request.body;
}

const response = await nodeFetch(url, options);
const text = await response.text();

// Check if response is successful
if (!response.ok) {
// Try to parse as XML error first (S3 errors are typically XML)
let xmlResult;
try {
xmlResult = await xml2js.parseStringPromise(text);
} catch {
// XML parsing failed, will try JSON below
}

if (xmlResult && xmlResult.Error) {
throw xmlResult;
}

// If XML parsing failed or no Error in XML, try JSON
try {
const json = JSON.parse(text);
if (json.error) {
throw json;
}
} catch {
// If both fail, throw the original error
throw new Error(`Request failed with status ${response.status}: ${text}`);
}
}

if (!text.trim()) {
return null;
}

return JSON.parse(text);
}

module.exports = {
sendRateLimitRequest,
skipIfRateLimitDisabled,
};
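
Usage mirrors the functional test above; a minimal sketch (the port and query string are simply the values the tests use):

// PUT a rate limit config as the 'lisa' service user, then read it back.
const { sendRateLimitRequest } = require('./tooling');

async function roundTrip(bucket) {
    await sendRateLimitRequest('PUT', '127.0.0.1:8000',
        `/${bucket}/?rate-limit`, JSON.stringify({ RequestsPerSecond: 100 }));
    const data = await sendRateLimitRequest('GET', '127.0.0.1:8000',
        `/${bucket}/?rate-limit`);
    return data.RequestsPerSecond.Limit; // 100, per the GET handler above
}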