Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
209 changes: 209 additions & 0 deletions spec/AuthDataUniqueIndex.spec.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,209 @@
'use strict';

const request = require('../lib/request');
const Config = require('../lib/Config');

describe('AuthData Unique Index', () => {
// Minimal auth adapter stub: accepts any app id and any auth payload,
// so the specs below can exercise the authData signup path without a
// real third-party provider.
const fakeAuthProvider = {
  validateAppId: () => Promise.resolve(),
  validateAuthData: () => Promise.resolve(),
};

// Reconfigure the server before each spec so the fake provider is
// registered and state left over from previous specs is discarded.
beforeEach(async () => {
  await reconfigureServer({ auth: { fakeAuthProvider } });
});

it('should prevent concurrent signups with the same authData from creating duplicate users', async () => {
  const authData = { fakeAuthProvider: { id: 'duplicate-test-id', token: 'token1' } };

  // Issue one signup request; normalize resolution/rejection into a record.
  const signUp = () =>
    request({
      method: 'POST',
      headers: {
        'X-Parse-Application-Id': 'test',
        'X-Parse-REST-API-Key': 'rest',
        'Content-Type': 'application/json',
      },
      url: 'http://localhost:8378/1/users',
      body: { authData },
    }).then(
      res => ({ success: true, data: res.data }),
      err => ({ success: false, error: err.data || err.message })
    );

  // Fire five identical signups in parallel to provoke the race.
  const outcomes = await Promise.all(Array.from({ length: 5 }, signUp));
  const succeeded = outcomes.filter(o => o.success);
  const failed = outcomes.filter(o => !o.success);

  // Key invariant: every winning request must resolve to the same user.
  expect(new Set(succeeded.map(o => o.data.objectId)).size).toBe(1);

  // Losing requests must get the canonical "already used" error.
  failed.forEach(o => {
    expect(o.error.code).toBe(208);
    expect(o.error.error).toBe('this auth is already used');
  });

  // And the database must contain exactly one matching user.
  const userQuery = new Parse.Query('_User');
  userQuery.equalTo('authData.fakeAuthProvider.id', 'duplicate-test-id');
  const matches = await userQuery.find({ useMasterKey: true });
  expect(matches.length).toBe(1);
});

it('should prevent concurrent signups via batch endpoint with same authData', async () => {
  const authData = { fakeAuthProvider: { id: 'batch-race-test-id', token: 'token1' } };

  // Submit three identical user creations inside a single batch request.
  const batchBody = {
    requests: [1, 2, 3].map(() => ({
      method: 'POST',
      path: '/1/users',
      body: { authData },
    })),
  };

  const response = await request({
    method: 'POST',
    headers: {
      'X-Parse-Application-Id': 'test',
      'X-Parse-REST-API-Key': 'rest',
      'Content-Type': 'application/json',
    },
    url: 'http://localhost:8378/1/batch',
    body: batchBody,
  });

  const outcomes = response.data;
  const succeeded = outcomes.filter(o => o.success);
  const failed = outcomes.filter(o => o.error);

  // Every successful sub-request must point at the same user.
  expect(new Set(succeeded.map(o => o.success.objectId)).size).toBe(1);

  // Failed sub-requests must carry the duplicate-auth error.
  for (const o of failed) {
    expect(o.error.code).toBe(208);
    expect(o.error.error).toBe('this auth is already used');
  }

  // Exactly one matching user may exist afterwards.
  const userQuery = new Parse.Query('_User');
  userQuery.equalTo('authData.fakeAuthProvider.id', 'batch-race-test-id');
  const matches = await userQuery.find({ useMasterKey: true });
  expect(matches.length).toBe(1);
});

it('should allow sequential signups with different authData IDs', async () => {
  // Two distinct provider ids must yield two distinct users.
  const first = await Parse.User.logInWith('fakeAuthProvider', {
    authData: { id: 'user-id-1', token: 'token1' },
  });
  const second = await Parse.User.logInWith('fakeAuthProvider', {
    authData: { id: 'user-id-2', token: 'token2' },
  });

  expect(first.id).toBeDefined();
  expect(second.id).toBeDefined();
  expect(second.id).not.toBe(first.id);
});

it('should still allow login with authData after successful signup', async () => {
  const authPayload = { authData: { id: 'login-test-id', token: 'token1' } };

  // First call creates the user...
  const created = await Parse.User.logInWith('fakeAuthProvider', authPayload);
  expect(created.id).toBeDefined();

  // ...and a repeat with identical authData must log into that same user.
  const loggedIn = await Parse.User.logInWith('fakeAuthProvider', authPayload);
  expect(loggedIn.id).toBe(created.id);
});

it('should skip startup index creation when createIndexAuthDataUniqueness is false', async () => {
  // Boot with the option disabled and a fresh default database adapter.
  await reconfigureServer({
    auth: { fakeAuthProvider },
    databaseAdapter: undefined,
    databaseOptions: { createIndexAuthDataUniqueness: false },
  });
  const database = Config.get('test').database;
  const indexSpy = spyOn(database.adapter, 'ensureAuthDataUniqueness').and.callThrough();

  // Re-run initialization: with the flag off, no index call may happen.
  await database.performInitialization();
  expect(indexSpy).not.toHaveBeenCalled();
});

it('should handle calling ensureAuthDataUniqueness multiple times via cache', async () => {
  const adapter = Config.get('test').database.adapter;

  // First invocation creates the index and primes the in-memory memo...
  await adapter.ensureAuthDataUniqueness('fakeAuthProvider');
  // ...so the second invocation is served from that memo without a DB call.
  await adapter.ensureAuthDataUniqueness('fakeAuthProvider');

  expect(adapter._authDataUniqueIndexes.has('fakeAuthProvider')).toBe(true);
});

it('should log warning when index creation fails due to existing duplicates', async () => {
  const config = Config.get('test');
  const adapter = config.database.adapter;

  // Drop the memoized providers so initialization attempts creation again.
  if (adapter._authDataUniqueIndexes) {
    adapter._authDataUniqueIndexes.clear();
  }

  // Make every creation attempt look like pre-existing duplicate rows.
  spyOn(adapter, 'ensureAuthDataUniqueness').and.callFake(() =>
    Promise.reject(new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'duplicates exist'))
  );

  const warnSpy = spyOn(require('../lib/logger').logger, 'warn');

  // Initialization must survive the failure and merely warn about it.
  await config.database.performInitialization();
  expect(warnSpy).toHaveBeenCalledWith(
    jasmine.stringContaining('Unable to ensure uniqueness for auth data provider'),
    jasmine.anything()
  );
});

it('should prevent concurrent signups with same anonymous authData', async () => {
  const anonymousId = 'anon-race-test-id';
  const authData = { anonymous: { id: anonymousId } };

  // One anonymous signup attempt, normalized to a result record.
  const signUp = () =>
    request({
      method: 'POST',
      headers: {
        'X-Parse-Application-Id': 'test',
        'X-Parse-REST-API-Key': 'rest',
        'Content-Type': 'application/json',
      },
      url: 'http://localhost:8378/1/users',
      body: { authData },
    }).then(
      res => ({ success: true, data: res.data }),
      err => ({ success: false, error: err.data || err.message })
    );

  // Five racing anonymous signups sharing one id.
  const outcomes = await Promise.all(Array.from({ length: 5 }, signUp));
  const succeeded = outcomes.filter(o => o.success);

  // All winners must see the same objectId.
  expect(new Set(succeeded.map(o => o.data.objectId)).size).toBe(1);

  // Only a single user row may exist for this anonymous id.
  const userQuery = new Parse.Query('_User');
  userQuery.equalTo('authData.anonymous.id', anonymousId);
  const matches = await userQuery.find({ useMasterKey: true });
  expect(matches.length).toBe(1);
});
});
43 changes: 43 additions & 0 deletions src/Adapters/Storage/Mongo/MongoStorageAdapter.js
Original file line number Diff line number Diff line change
Expand Up @@ -582,6 +582,13 @@ export class MongoStorageAdapter implements StorageAdapter {
if (matches && Array.isArray(matches)) {
err.userInfo = { duplicated_field: matches[1] };
}
// Check for authData unique index violations
if (!err.userInfo) {
const authDataMatch = error.message.match(/index:\s+(_auth_data_[a-zA-Z0-9_]+_id)/);
if (authDataMatch) {
err.userInfo = { duplicated_field: authDataMatch[1] };
}
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
}
throw err;
}
Expand Down Expand Up @@ -818,6 +825,42 @@ export class MongoStorageAdapter implements StorageAdapter {
.catch(err => this.handleError(err));
}

// Creates a unique sparse index on _auth_data_<provider>.id to prevent
// race conditions during concurrent signups with the same authData.
ensureAuthDataUniqueness(provider: string) {
if (!this._authDataUniqueIndexes) {
this._authDataUniqueIndexes = new Set();
}
if (this._authDataUniqueIndexes.has(provider)) {
return Promise.resolve();
}
return this._adaptiveCollection('_User')
.then(collection =>
collection._mongoCollection.createIndex(
{ [`_auth_data_${provider}.id`]: 1 },
{ unique: true, sparse: true, background: true, name: `_auth_data_${provider}_id` }
)
)
.then(() => {
this._authDataUniqueIndexes.add(provider);
})
.catch(error => {
if (error.code === 11000) {
throw new Parse.Error(
Parse.Error.DUPLICATE_VALUE,
'Tried to ensure field uniqueness for a class that already has duplicates.'
);
}
// Ignore "index already exists with same name" or "index already exists with different options"
if (error.code === 85 || error.code === 86) {
this._authDataUniqueIndexes.add(provider);
return;
}
throw error;
})
.catch(err => this.handleError(err));
Comment thread
coderabbitai[bot] marked this conversation as resolved.
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.

// Used in tests
_rawFind(className: string, query: QueryType) {
return this._adaptiveCollection(className)
Expand Down
44 changes: 41 additions & 3 deletions src/Adapters/Storage/Postgres/PostgresStorageAdapter.js
Original file line number Diff line number Diff line change
Expand Up @@ -1479,9 +1479,15 @@ export class PostgresStorageAdapter implements StorageAdapter {
);
err.underlyingError = error;
if (error.constraint) {
const matches = error.constraint.match(/unique_([a-zA-Z]+)/);
if (matches && Array.isArray(matches)) {
err.userInfo = { duplicated_field: matches[1] };
// Check for authData unique index violations first
const authDataMatch = error.constraint.match(/_User_unique_authData_([a-zA-Z0-9_]+)_id/);
if (authDataMatch) {
err.userInfo = { duplicated_field: `_auth_data_${authDataMatch[1]}` };
} else {
const matches = error.constraint.match(/unique_([a-zA-Z]+)/);
if (matches && Array.isArray(matches)) {
err.userInfo = { duplicated_field: matches[1] };
}
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
}
error = err;
Expand Down Expand Up @@ -2052,6 +2058,38 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
}

// Creates a unique index on authData-><provider>->>'id' to prevent
// race conditions during concurrent signups with the same authData.
async ensureAuthDataUniqueness(provider: string) {
Comment thread
coderabbitai[bot] marked this conversation as resolved.
if (!this._authDataUniqueIndexes) {
this._authDataUniqueIndexes = new Set();
}
if (this._authDataUniqueIndexes.has(provider)) {
return;
}
const indexName = `_User_unique_authData_${provider}_id`;
const qs = `CREATE UNIQUE INDEX IF NOT EXISTS $1:name ON "_User" (("authData"->$2::text->>'id')) WHERE "authData"->$2::text->>'id' IS NOT NULL`;
await this._client.none(qs, [indexName, provider]).catch(error => {
if (
error.code === PostgresDuplicateRelationError &&
error.message.includes(indexName)
) {
// Index already exists. Ignore error.
} else if (
error.code === PostgresUniqueIndexViolationError &&
error.message.includes(indexName)
) {
throw new Parse.Error(
Parse.Error.DUPLICATE_VALUE,
'Tried to ensure field uniqueness for a class that already has duplicates.'
);
} else {
throw error;
}
});
this._authDataUniqueIndexes.add(provider);
Comment thread
coderabbitai[bot] marked this conversation as resolved.
Outdated
}
Comment thread
mtrezza marked this conversation as resolved.

// Executes a count.
async count(
className: string,
Expand Down
24 changes: 24 additions & 0 deletions src/Controllers/DatabaseController.js
Original file line number Diff line number Diff line change
Expand Up @@ -1850,6 +1850,30 @@ class DatabaseController {
throw error;
});
}
// Create unique indexes for authData providers to prevent race conditions
// during concurrent signups with the same authData
if (
databaseOptions.createIndexAuthDataUniqueness !== false &&
typeof this.adapter.ensureAuthDataUniqueness === 'function'
) {
const authProviders = Object.keys(this.options.auth || {});
if (this.options.enableAnonymousUsers !== false) {
if (!authProviders.includes('anonymous')) {
authProviders.push('anonymous');
}
}
await Promise.all(
authProviders.map(provider =>
this.adapter.ensureAuthDataUniqueness(provider).catch(error => {
logger.warn(
`Unable to ensure uniqueness for auth data provider "${provider}": `,
error
);
})
)
Comment thread
mtrezza marked this conversation as resolved.
);
}

await this.adapter.updateSchemaWithIndexes();
}

Expand Down
6 changes: 6 additions & 0 deletions src/Options/Definitions.js
Original file line number Diff line number Diff line change
Expand Up @@ -1169,6 +1169,12 @@ module.exports.DatabaseOptions = {
help: 'The MongoDB driver option to specify the amount of time, in milliseconds, to wait to establish a single TCP socket connection to the server before raising an error. Specifying 0 disables the connection timeout.',
action: parsers.numberParser('connectTimeoutMS'),
},
createIndexAuthDataUniqueness: {
env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_AUTH_DATA_UNIQUENESS',
help: 'Set to `true` to automatically create unique indexes on the authData fields of the _User collection for each configured auth provider on server start. These indexes prevent race conditions during concurrent signups with the same authData. Set to `false` to skip index creation. Default is `true`.<br><br>\u26A0\uFE0F When setting this option to `false` to manually create the indexes, keep in mind that the otherwise automatically created indexes may change in the future to be optimized for the internal usage by Parse Server.',
action: parsers.booleanParser,
default: true,
},
createIndexRoleName: {
env: 'PARSE_SERVER_DATABASE_CREATE_INDEX_ROLE_NAME',
help: 'Set to `true` to automatically create a unique index on the name field of the _Role collection on server start. Set to `false` to skip index creation. Default is `true`.<br><br>\u26A0\uFE0F When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.',
Expand Down
1 change: 1 addition & 0 deletions src/Options/docs.js

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions src/Options/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -806,6 +806,9 @@ export interface DatabaseOptions {
/* Set to `true` to automatically create a case-insensitive index on the username field of the _User collection on server start. Set to `false` to skip index creation. Default is `true`.<br><br>⚠️ When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.
:DEFAULT: true */
createIndexUserUsernameCaseInsensitive: ?boolean;
/* Set to `true` to automatically create unique indexes on the authData fields of the _User collection for each configured auth provider on server start. These indexes prevent race conditions during concurrent signups with the same authData. Set to `false` to skip index creation. Default is `true`.<br><br>⚠️ When setting this option to `false` to manually create the indexes, keep in mind that the otherwise automatically created indexes may change in the future to be optimized for the internal usage by Parse Server.
:DEFAULT: true */
createIndexAuthDataUniqueness: ?boolean;
Comment thread
coderabbitai[bot] marked this conversation as resolved.
Outdated
/* Set to `true` to automatically create a unique index on the name field of the _Role collection on server start. Set to `false` to skip index creation. Default is `true`.<br><br>⚠️ When setting this option to `false` to manually create the index, keep in mind that the otherwise automatically created index may change in the future to be optimized for the internal usage by Parse Server.
:DEFAULT: true */
createIndexRoleName: ?boolean;
Expand Down
Loading
Loading