Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ jobs:
with:
node-version: lts/*
- name: Setup PNPM
uses: pnpm/action-setup@v3
uses: pnpm/action-setup@v4
with:
version: latest
run_install: true
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -137,4 +137,4 @@ How long keep records of requests in milliseconds. If provided, it overrides the
**async-ratelimiter** © [microlink.io](https://microlink.io), released under the [MIT](https://github.com/microlinkhq/async-ratelimiter/blob/master/LICENSE.md) License.<br>
Authored and maintained by [Kiko Beats](https://kikobeats.com) with help from [contributors](https://github.com/microlinkhq/async-ratelimiter/contributors).

> [microlink.io](https://microlink.io) · GitHub [microlink.io](https://github.com/microlinkhq) · Twitter [@microlinkhq](https://twitter.com/microlinkhq)
> [microlink.io](https://microlink.io) · GitHub [microlink.io](https://github.com/microlinkhq) · X [@microlinkhq](https://x.com/microlinkhq)
187 changes: 187 additions & 0 deletions benchmark/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,187 @@
'use strict'

const RateLimiter = require('..')
const Redis = require('ioredis')
const { performance } = require('perf_hooks')

// Configuration
// Tunables for the whole benchmark run.
// NOTE(review): `duration` is written in seconds here but is handed
// straight to the RateLimiter, which appears to work in milliseconds
// (the Lua script feeds it to PEXPIRE) — confirm the intended unit.
const CONFIG = {
// Benchmark settings
iterations: 100000, // total limiter.get() calls in the measured run
concurrency: 100, // calls kept in flight per batch
warmup: 1000, // unmeasured calls issued before timing starts
// Rate limiter settings
maxRequests: 100, // max requests allowed per window per id
duration: 60, // seconds
// Distribution settings
ipCount: 200, // size of the "regular" IP pool
hotIpPercentage: 20, // percentage of requests that hit "hot" IPs
hotIpCount: 10, // size of the "hot" IP pool (these get rate limited)
// Redis settings
redisOptions: {
host: 'localhost',
port: 6379
}
}

// Build the two pools of client IPs used by the benchmark: a "regular"
// pool of CONFIG.ipCount addresses and a smaller "hot" pool of
// CONFIG.hotIpCount addresses that receives a disproportionate share of
// traffic (and is therefore expected to be rate limited).
// Returns { ips, hotIps }.
function generateIps () {
  const ips = Array.from(
    { length: CONFIG.ipCount },
    (_, i) => `192.168.1.${i % 255}`
  )
  const hotIps = Array.from(
    { length: CONFIG.hotIpCount },
    (_, i) => `10.0.0.${i % 255}`
  )
  return { ips, hotIps }
}

// Pick the IP for the next simulated request: with probability
// CONFIG.hotIpPercentage (percent) draw from the hot pool, otherwise
// from the regular pool. Selection within the chosen pool is uniform.
function selectIp (ips, hotIps) {
  const pool = Math.random() * 100 < CONFIG.hotIpPercentage ? hotIps : ips
  return pool[Math.floor(Math.random() * pool.length)]
}

// Run the benchmark end-to-end: connect to Redis, warm up the limiter,
// flush the database, then time CONFIG.iterations calls to limiter.get()
// issued in batches of CONFIG.concurrency. Prints throughput and latency
// statistics (average, p50/p95/p99). Exits the process with code 1 on
// any error. Returns a promise that resolves when reporting is done.
async function runBenchmark () {
  console.log('=== Async RateLimiter Benchmark ===')
  console.log(`Iterations: ${CONFIG.iterations}`)
  console.log(`Concurrency: ${CONFIG.concurrency}`)
  console.log(`Rate limit: ${CONFIG.maxRequests} requests per ${CONFIG.duration} seconds`)
  console.log(
    `IP distribution: ${CONFIG.ipCount} IPs (${CONFIG.hotIpCount} hot IPs receiving ${CONFIG.hotIpPercentage}% of traffic)`
  )
  console.log(`Redis: ${CONFIG.redisOptions.host}:${CONFIG.redisOptions.port}`)
  console.log('-----------------------------------')

  try {
    // Connect to Redis using ioredis
    const redis = new Redis(CONFIG.redisOptions)

    // Create rate limiter.
    // BUGFIX: the limiter's `duration` is milliseconds (it is applied via
    // PEXPIRE), while CONFIG.duration is expressed in seconds — convert,
    // otherwise the benchmark runs against a 60 ms window instead of 60 s.
    const limiter = new RateLimiter({
      db: redis,
      max: CONFIG.maxRequests,
      duration: CONFIG.duration * 1000
    })

    // Generate IPs
    const { ips, hotIps } = generateIps()

    // Warmup: prime Redis/connection/JIT so the timed run is steady-state.
    console.log(`Warming up with ${CONFIG.warmup} requests...`)
    for (let i = 0; i < CONFIG.warmup; i++) {
      const ip = selectIp(ips, hotIps)
      await limiter.get({ id: ip })
    }

    // Reset Redis so warmup traffic doesn't skew the measured counters.
    console.log('Resetting Redis before benchmark...')
    await redis.flushdb()

    // Wait a moment for Redis to settle
    await new Promise(resolve => setTimeout(resolve, 1000))

    // Run benchmark
    console.log(`Running ${CONFIG.iterations} iterations...`)

    const results = {
      totalTime: 0,
      successCount: 0, // requests with remaining capacity (allowed)
      limitedCount: 0, // requests rejected by the limiter
      latencies: [] // per-request wall time in ms
    }

    const start = performance.now()

    // Issue requests in batches of at most CONFIG.concurrency in-flight
    // calls; each batch is awaited before the next starts.
    const batchSize = Math.min(CONFIG.concurrency, CONFIG.iterations)
    const batches = Math.ceil(CONFIG.iterations / batchSize)

    for (let b = 0; b < batches; b++) {
      // The last batch may be smaller than batchSize.
      const currentBatchSize = Math.min(batchSize, CONFIG.iterations - b * batchSize)
      const promises = []

      for (let i = 0; i < currentBatchSize; i++) {
        const ip = selectIp(ips, hotIps)

        promises.push(
          (async () => {
            const requestStart = performance.now()
            const limit = await limiter.get({ id: ip })
            const requestEnd = performance.now()

            results.latencies.push(requestEnd - requestStart)

            // remaining > 0 means the request was admitted; the limiter
            // returns 0 when the request was rejected.
            if (limit.remaining > 0) {
              results.successCount++
            } else {
              results.limitedCount++
            }
          })()
        )
      }

      await Promise.all(promises)

      // Show progress roughly every 10% of batches.
      if (batches > 10 && b % Math.floor(batches / 10) === 0) {
        const progress = Math.floor((b / batches) * 100)
        console.log(`Progress: ${progress}%`)
      }
    }

    const end = performance.now()
    results.totalTime = end - start

    // Calculate statistics
    results.totalRequests = results.successCount + results.limitedCount
    results.limitedPercentage = (results.limitedCount / results.totalRequests) * 100
    results.averageLatency = results.latencies.reduce((a, b) => a + b, 0) / results.latencies.length

    // Sort latencies ascending so percentiles can be read by index.
    results.latencies.sort((a, b) => a - b)
    results.p50Latency = results.latencies[Math.floor(results.latencies.length * 0.5)]
    results.p95Latency = results.latencies[Math.floor(results.latencies.length * 0.95)]
    results.p99Latency = results.latencies[Math.floor(results.latencies.length * 0.99)]

    results.requestsPerSecond = (results.totalRequests / results.totalTime) * 1000

    // Print results
    console.log('\n=== Benchmark Results ===')
    console.log(`Total requests: ${results.totalRequests}`)
    console.log(`Successful requests: ${results.successCount}`)
    console.log(
      `Rate limited requests: ${results.limitedCount} (${results.limitedPercentage.toFixed(2)}%)`
    )
    console.log(`Total time: ${results.totalTime.toFixed(2)}ms`)
    console.log(`Requests per second: ${results.requestsPerSecond.toFixed(2)}`)
    console.log('\nLatency:')
    console.log(`  Average: ${results.averageLatency.toFixed(2)}ms`)
    console.log(`  p50: ${results.p50Latency.toFixed(2)}ms`)
    console.log(`  p95: ${results.p95Latency.toFixed(2)}ms`)
    console.log(`  p99: ${results.p99Latency.toFixed(2)}ms`)

    // Clean up
    await redis.quit()
  } catch (error) {
    console.error('Benchmark error:', error)
    process.exit(1)
  }
}

// Entry point: run the benchmark and turn any rejection that escapes
// runBenchmark itself into a logged error and a non-zero exit code.
;(async () => {
  try {
    await runBenchmark()
  } catch (err) {
    console.error('Unexpected error:', err)
    process.exit(1)
  }
})()
14 changes: 8 additions & 6 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@
"ioredis": "latest",
"mocha": "latest",
"nano-staged": "latest",
"npm-check-updates": "latest",
"should": "latest",
"simple-git-hooks": "latest",
"standard": "latest",
Expand All @@ -61,20 +60,23 @@
"contributors": "(npx git-authors-cli && npx finepack && git add package.json && git commit -m 'build: contributors' --no-verify) || true",
"lint": "standard-markdown README.md && standard",
"postrelease": "npm run release:tags && npm run release:github && (ci-publish || npm publish --access=public)",
"prerelease": "npm run update:check && npm run contributors",
"prerelease": "npm run contributors",
"pretest": "npm run lint",
"release": "standard-version -a",
"release:github": "github-generate-release",
"release:tags": "git push --follow-tags origin HEAD:master",
"test": "c8 mocha --exit",
"update": "ncu -u",
"update:check": "ncu -- --error-level 2"
"test": "c8 mocha --exit"
},
"license": "MIT",
"commitlint": {
"extends": [
"@commitlint/config-conventional"
]
],
"rules": {
"body-max-line-length": [
0
]
}
},
"nano-staged": {
"*.js": [
Expand Down
104 changes: 80 additions & 24 deletions src/index.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
'use strict'

const assert = require('assert')

const microtime = require('./microtime')

module.exports = class Limiter {
Expand All @@ -12,38 +11,95 @@ module.exports = class Limiter {
this.max = max
this.duration = duration
this.namespace = namespace

this.db.defineCommand('ratelimiter', {
numberOfKeys: 1,
lua: `
local key = KEYS[1]
local now = tonumber(ARGV[1])
local duration = tonumber(ARGV[2])
local max = tonumber(ARGV[3])
local start = now - duration

-- Check if the key exists
local exists = redis.call('EXISTS', key)

local count = 0
local oldest = now

if exists == 1 then
-- Remove expired entries based on the current duration
redis.call('ZREMRANGEBYSCORE', key, 0, start)

-- Get count
count = redis.call('ZCARD', key)

-- Get oldest timestamp if we have entries
if count > 0 then
local oldest_result = redis.call('ZRANGE', key, 0, 0)
oldest = tonumber(oldest_result[1])
end
end

-- Calculate remaining (before adding current request)
local remaining = max - count

-- Early return if already at limit
if remaining <= 0 then
local resetMicro = oldest + duration
return {0, math.floor(resetMicro / 1000), max}
end

-- Add current request with current timestamp
redis.call('ZADD', key, now, now)

-- Calculate reset time and handle trimming if needed
local resetMicro

-- Only perform trim if we're at or over max (based on count before adding)
if count >= max then
-- Get the entry at position -max for reset time calculation
local oldest_in_range_result = redis.call('ZRANGE', key, -max, -max)
local oldestInRange = oldest

if #oldest_in_range_result > 0 then
oldestInRange = tonumber(oldest_in_range_result[1])
end

-- Trim the set
redis.call('ZREMRANGEBYRANK', key, 0, -(max + 1))

-- Calculate reset time based on the entry at position -max
resetMicro = oldestInRange + duration
else
-- We're under the limit, use the oldest entry for reset time
resetMicro = oldest + duration
end

-- Set expiration using the provided duration
redis.call('PEXPIRE', key, duration)

return {remaining, math.floor(resetMicro / 1000), max}
`
})
}

async get ({ id = this.id, max = this.max, duration = this.duration } = {}) {
assert(id, 'id required')
assert(max, 'max required')
assert(duration, 'duration required')

const key = `${this.namespace}:${id}`
const now = microtime.now()
const start = now - duration * 1000

const operations = [
['zremrangebyscore', key, 0, start],
['zcard', key],
['zadd', key, now, now],
['zrange', key, 0, 0],
['zrange', key, -max, -max],
['zremrangebyrank', key, 0, -(max + 1)],
['pexpire', key, duration]
]

const res = await this.db.multi(operations).exec()

const count = Number(res[1][1])
const oldest = Number(res[3][1][0])
const oldestInRange = Number(res[4][1][0])
const resetMicro = (Number.isNaN(oldestInRange) ? oldest : oldestInRange) + duration * 1000
const result = await this.db.ratelimiter(
`${this.namespace}:${id}`,
microtime.now(),
duration,
max
)

return {
remaining: count < max ? max - count : 0,
reset: Math.floor(resetMicro / 1000000),
total: max
remaining: result[0],
reset: Math.floor(result[1]),
total: result[2]
}
}
}
5 changes: 3 additions & 2 deletions src/microtime.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
'use strict'

const time = Date.now() * 1e3
const time = Date.now()
const start = process.hrtime.bigint()

module.exports.now = () => time + Number(process.hrtime.bigint() - start) * 1e-3
// Return high-precision timestamp in milliseconds
module.exports.now = () => time + Number(process.hrtime.bigint() - start) / 1e6
Loading