Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
127 changes: 127 additions & 0 deletions spec/RateLimit.spec.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
const RedisCacheAdapter = require('../lib/Adapters/Cache/RedisCacheAdapter').default;
const request = require('../lib/request');

// Default HTTP headers for raw REST requests in this spec: JSON payloads
// authenticated against the test app via application ID and REST API key.
const headers = {
'Content-Type': 'application/json',
'X-Parse-Application-Id': 'test',
'X-Parse-REST-API-Key': 'rest',
};

describe('rate limit', () => {
it('can limit cloud functions', async () => {
Parse.Cloud.define('test', () => 'Abc');
Expand Down Expand Up @@ -487,6 +495,125 @@ describe('rate limit', () => {
})
).toBeRejectedWith(`Invalid rate limit option "path"`);
});
describe('batch', () => {
  // POST the given sub-requests to the batch endpoint as a single request.
  // Returns the raw request promise so callers decide how to handle rejection.
  const postBatch = requests =>
    request({
      method: 'POST',
      headers: headers,
      url: 'http://localhost:8378/1/batch',
      body: JSON.stringify({ requests }),
    });

  // Build a sub-request that creates a MyObject with the given key value.
  const createObject = value => ({
    method: 'POST',
    path: '/1/classes/MyObject',
    body: { key: value },
  });

  it('should reject batch request when sub-requests exceed rate limit for a path', async () => {
    // Limit of 2 requests per window on the classes path; the batch below
    // carries 3 matching sub-requests, so the whole batch must be rejected.
    await reconfigureServer({
      rateLimit: [
        {
          requestPath: '/classes/*path',
          requestTimeWindow: 10000,
          requestCount: 2,
          errorResponseMessage: 'Too many requests',
          includeInternalRequests: true,
        },
      ],
    });
    const response = await postBatch([
      createObject('value1'),
      createObject('value2'),
      createObject('value3'),
    ]).catch(e => e);
    expect(response.data).toEqual({
      code: Parse.Error.CONNECTION_FAILED,
      error: 'Batch request exceeds rate limit for endpoint',
    });
  });

  it('should allow batch request when sub-requests are within rate limit', async () => {
    // Limit of 5 comfortably covers the 3 sub-requests; the batch succeeds.
    await reconfigureServer({
      rateLimit: [
        {
          requestPath: '/classes/*path',
          requestTimeWindow: 10000,
          requestCount: 5,
          errorResponseMessage: 'Too many requests',
          includeInternalRequests: true,
        },
      ],
    });
    const response = await postBatch([
      createObject('value1'),
      createObject('value2'),
      createObject('value3'),
    ]);
    expect(response.data.length).toBe(3);
    expect(response.data[0].success).toBeDefined();
  });

  it('should reject batch when sub-requests for one rate-limited path exceed limit among mixed paths', async () => {
    // Only /login is limited (1 per window); the batch mixes an unlimited
    // class write with 2 login sub-requests, which exceeds the login limit.
    await reconfigureServer({
      rateLimit: [
        {
          requestPath: '/login',
          requestTimeWindow: 10000,
          requestCount: 1,
          errorResponseMessage: 'Too many login requests',
          includeInternalRequests: true,
        },
      ],
    });
    await Parse.User.signUp('testuser', 'password');
    const response = await postBatch([
      createObject('value1'),
      { method: 'POST', path: '/1/login', body: { username: 'testuser', password: 'password' } },
      { method: 'POST', path: '/1/login', body: { username: 'testuser', password: 'wrong' } },
    ]).catch(e => e);
    expect(response.data).toEqual({
      code: Parse.Error.CONNECTION_FAILED,
      error: 'Batch request exceeds rate limit for endpoint',
    });
  });

  it('should not reject batch when sub-requests target non-rate-limited paths', async () => {
    // The limit applies to /login only, so class writes are unaffected.
    await reconfigureServer({
      rateLimit: [
        {
          requestPath: '/login',
          requestTimeWindow: 10000,
          requestCount: 1,
          errorResponseMessage: 'Too many login requests',
          includeInternalRequests: true,
        },
      ],
    });
    const response = await postBatch([
      createObject('value1'),
      createObject('value2'),
      createObject('value3'),
    ]);
    expect(response.data.length).toBe(3);
    expect(response.data[0].success).toBeDefined();
  });
});

describe_only(() => {
return process.env.PARSE_SERVER_TEST_CACHE === 'redis';
})('with RedisCache', function () {
Expand Down
2 changes: 1 addition & 1 deletion src/Options/Definitions.js
Original file line number Diff line number Diff line change
Expand Up @@ -637,7 +637,7 @@ module.exports.RateLimitOptions = {
},
requestCount: {
env: 'PARSE_SERVER_RATE_LIMIT_REQUEST_COUNT',
help: 'The number of requests that can be made per IP address within the time window set in `requestTimeWindow` before the rate limit is applied.',
help: 'The number of requests that can be made per IP address within the time window set in `requestTimeWindow` before the rate limit is applied. For batch requests, this also limits the number of sub-requests in a single batch that target this path; however, requests already consumed in the current time window are not counted against the batch, so the effective limit may be higher when combining individual and batch requests. Note that this is a basic server-level rate limit; for comprehensive protection, use a reverse proxy or WAF for rate limiting.',
action: parsers.numberParser('requestCount'),
},
requestMethods: {
Expand Down
2 changes: 1 addition & 1 deletion src/Options/docs.js

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion src/Options/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -387,7 +387,7 @@ export interface RateLimitOptions {
requestPath: string;
/* The window of time in milliseconds within which the number of requests set in `requestCount` can be made before the rate limit is applied. */
requestTimeWindow: ?number;
/* The number of requests that can be made per IP address within the time window set in `requestTimeWindow` before the rate limit is applied. */
/* The number of requests that can be made per IP address within the time window set in `requestTimeWindow` before the rate limit is applied. For batch requests, this also limits the number of sub-requests in a single batch that target this path; however, requests already consumed in the current time window are not counted against the batch, so the effective limit may be higher when combining individual and batch requests. Note that this is a basic server-level rate limit; for comprehensive protection, use a reverse proxy or WAF for rate limiting. */
requestCount: ?number;
/* The error message that should be returned in the body of the HTTP 429 response when the rate limit is hit. Default is `Too many requests.`.
:DEFAULT: Too many requests. */
Expand Down
21 changes: 21 additions & 0 deletions src/batch.js
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,27 @@ function handleBatch(router, req) {
req.config.publicServerURL
);

// Check if batch sub-requests would exceed any configured rate limits.
// Count how many sub-requests target each rate-limited path and reject
// the entire batch if any path's count exceeds its requestCount.
const rateLimits = req.config.rateLimits || [];
for (const limit of rateLimits) {
const pathExp = limit.path.regexp || limit.path;
let matchCount = 0;
for (const restRequest of req.body.requests) {
const routablePath = makeRoutablePath(restRequest.path);
if (pathExp.test(routablePath)) {
matchCount++;
}
}
if (matchCount > limit.requestCount) {
throw new Parse.Error(
Parse.Error.CONNECTION_FAILED,
'Batch request exceeds rate limit for endpoint'
);
Comment thread
coderabbitai[bot] marked this conversation as resolved.
}
}

const batch = transactionRetries => {
let initialPromise = Promise.resolve();
if (req.body?.transaction === true) {
Expand Down
1 change: 1 addition & 0 deletions src/middlewares.js
Original file line number Diff line number Diff line change
Expand Up @@ -584,6 +584,7 @@ export const addRateLimit = (route, config, cloud) => {
}
config.rateLimits.push({
path: pathToRegexp(route.requestPath),
requestCount: route.requestCount,
handler: rateLimit({
windowMs: route.requestTimeWindow,
max: route.requestCount,
Expand Down
Loading