Introduce support for the concurrent session limit configuration. (#147442)

Aleh Zasypkin authored on 2023-01-17 14:26:49 +01:00; committed by GitHub
parent ed32d89848
commit 1ef52f0311
27 changed files with 2092 additions and 83 deletions


@ -265,6 +265,7 @@ enabled:
- x-pack/test/security_api_integration/session_idle.config.ts
- x-pack/test/security_api_integration/session_invalidate.config.ts
- x-pack/test/security_api_integration/session_lifespan.config.ts
- x-pack/test/security_api_integration/session_concurrent_limit.config.ts
- x-pack/test/security_api_integration/token.config.ts
- x-pack/test/security_api_integration/user_profiles.config.ts
- x-pack/test/security_functional/login_selector.config.ts


@ -381,6 +381,7 @@ kibana_vars=(
xpack.security.sameSiteCookies
xpack.security.secureCookies
xpack.security.session.cleanupInterval
xpack.security.session.concurrentSessions.maxSessions
xpack.security.session.idleTimeout
xpack.security.session.lifespan
xpack.security.sessionTimeout


@ -15,11 +15,13 @@ import { AuthenticationResult } from '../authentication';
import {
httpRequestEvent,
savedObjectEvent,
sessionCleanupConcurrentLimitEvent,
sessionCleanupEvent,
SpaceAuditAction,
spaceAuditEvent,
userLoginEvent,
userLogoutEvent,
userSessionConcurrentLimitLogoutEvent,
} from './audit_events';
describe('#savedObjectEvent', () => {
@ -360,6 +362,63 @@ describe('#userLogoutEvent', () => {
});
});
describe('#userSessionConcurrentLimitLogoutEvent', () => {
test('creates event with `unknown` outcome', () => {
expect(
userSessionConcurrentLimitLogoutEvent({
username: 'elastic',
provider: { name: 'basic1', type: 'basic' },
userProfileId: 'uid',
})
).toMatchInlineSnapshot(`
Object {
"event": Object {
"action": "user_logout",
"category": Array [
"authentication",
],
"outcome": "unknown",
},
"kibana": Object {
"authentication_provider": "basic1",
"authentication_type": "basic",
},
"message": "User [elastic] is logging out due to exceeded concurrent sessions limit for basic provider [name=basic1]",
"user": Object {
"id": "uid",
"name": "elastic",
},
}
`);
expect(
userSessionConcurrentLimitLogoutEvent({
username: 'elastic',
provider: { name: 'basic1', type: 'basic' },
})
).toMatchInlineSnapshot(`
Object {
"event": Object {
"action": "user_logout",
"category": Array [
"authentication",
],
"outcome": "unknown",
},
"kibana": Object {
"authentication_provider": "basic1",
"authentication_type": "basic",
},
"message": "User [elastic] is logging out due to exceeded concurrent sessions limit for basic provider [name=basic1]",
"user": Object {
"id": undefined,
"name": "elastic",
},
}
`);
});
});
describe('#sessionCleanupEvent', () => {
test('creates event with `unknown` outcome', () => {
expect(
@ -391,6 +450,37 @@ describe('#sessionCleanupEvent', () => {
});
});
describe('#sessionCleanupConcurrentLimitEvent', () => {
test('creates event with `unknown` outcome', () => {
expect(
sessionCleanupConcurrentLimitEvent({
usernameHash: 'abcdef',
sessionId: 'sid',
provider: { name: 'basic1', type: 'basic' },
})
).toMatchInlineSnapshot(`
Object {
"event": Object {
"action": "session_cleanup",
"category": Array [
"authentication",
],
"outcome": "unknown",
},
"kibana": Object {
"authentication_provider": "basic1",
"authentication_type": "basic",
"session_id": "sid",
},
"message": "Removing session for user [hash=abcdef] due to exceeded concurrent sessions limit",
"user": Object {
"hash": "abcdef",
},
}
`);
});
});
describe('#httpRequestEvent', () => {
test('creates event with `unknown` outcome', () => {
expect(


@ -215,6 +215,32 @@ export function userLogoutEvent({
};
}
export function userSessionConcurrentLimitLogoutEvent({
username,
provider,
userProfileId,
}: UserLogoutParams): AuditEvent {
return {
message: `User [${username}] is logging out due to exceeded concurrent sessions limit for ${provider.type} provider [name=${provider.name}]`,
event: {
action: 'user_logout',
category: ['authentication'],
outcome: 'unknown',
},
user:
userProfileId || username
? {
id: userProfileId,
name: username,
}
: undefined,
kibana: {
authentication_provider: provider.name,
authentication_type: provider.type,
},
};
}
export interface SessionCleanupParams {
sessionId: string;
usernameHash?: string;
@ -244,6 +270,29 @@ export function sessionCleanupEvent({
};
}
export function sessionCleanupConcurrentLimitEvent({
usernameHash,
sessionId,
provider,
}: SessionCleanupParams): AuditEvent {
return {
message: `Removing session for user [hash=${usernameHash}] due to exceeded concurrent sessions limit`,
event: {
action: 'session_cleanup',
category: ['authentication'],
outcome: 'unknown',
},
user: {
hash: usernameHash,
},
kibana: {
session_id: sessionId,
authentication_provider: provider.name,
authentication_type: provider.type,
},
};
}
export interface AccessAgreementAcknowledgedParams {
username: string;
provider: AuthenticationProvider;


@ -11,7 +11,9 @@ export type { AuditEvent, AuditHttp, AuditKibana, AuditRequest } from './audit_e
export {
userLoginEvent,
userLogoutEvent,
userSessionConcurrentLimitLogoutEvent,
sessionCleanupEvent,
sessionCleanupConcurrentLimitEvent,
accessAgreementAcknowledgedEvent,
httpRequestEvent,
savedObjectEvent,


@ -800,7 +800,7 @@ export class Authenticator {
await this.invalidateSessionValue({
request,
sessionValue: existingSessionValue,
skipAuditEvent: true, // Skip writing an audit event when we are replacing an intermediate session with a fullly authenticated session
skipAuditEvent: true, // Skip writing an audit event when we are replacing an intermediate session with a fully authenticated session
});
existingSessionValue = null;
} else if (usernameHasChanged) {


@ -1412,6 +1412,40 @@ describe('config schema', () => {
'[session.cleanupInterval]: the value must be greater or equal to 10 seconds.'
);
});
it('should throw error if xpack.security.session.concurrentSessions.maxSessions is less than 1 or greater than 1000', () => {
expect(() =>
ConfigSchema.validate({ session: { concurrentSessions: { maxSessions: -1 } } })
).toThrow(
'[session.concurrentSessions.maxSessions]: Value must be equal to or greater than [1].'
);
expect(() =>
ConfigSchema.validate({ session: { concurrentSessions: { maxSessions: 0 } } })
).toThrow(
'[session.concurrentSessions.maxSessions]: Value must be equal to or greater than [1].'
);
expect(() =>
ConfigSchema.validate({ session: { concurrentSessions: { maxSessions: 1001 } } })
).toThrow(
'[session.concurrentSessions.maxSessions]: Value must be equal to or lower than [1000].'
);
});
it('successfully validates valid xpack.security.session.concurrentSessions.maxSessions', () => {
expect(ConfigSchema.validate({ session: { concurrentSessions: { maxSessions: 3 } } }).session)
.toMatchInlineSnapshot(`
Object {
"cleanupInterval": "PT1H",
"concurrentSessions": Object {
"maxSessions": 3,
},
"idleTimeout": "PT8H",
"lifespan": "P30D",
}
`);
});
});
});
@ -1465,6 +1499,40 @@ describe('createConfig()', () => {
`);
});
it('should log a warning if both concurrent sessions limit and HTTP authentication are configured', async () => {
const logger = loggingSystemMock.create();
const config = createConfig(
ConfigSchema.validate({ session: { concurrentSessions: { maxSessions: 3 } } }),
logger.get(),
{ isTLSEnabled: true }
);
expect(config.session.concurrentSessions?.maxSessions).toBe(3);
expect(config.authc.http.enabled).toBe(true);
expect(loggingSystemMock.collect(logger).warn).toMatchInlineSnapshot(`
Array [
Array [
"Both concurrent user sessions limit and HTTP authentication are configured. The limit does not apply to HTTP authentication.",
],
]
`);
loggingSystemMock.clear(logger);
const configWithoutHTTPAuth = createConfig(
ConfigSchema.validate({
session: { concurrentSessions: { maxSessions: 3 } },
authc: { http: { enabled: false } },
}),
logger.get(),
{ isTLSEnabled: true }
);
expect(configWithoutHTTPAuth.session.concurrentSessions?.maxSessions).toBe(3);
expect(configWithoutHTTPAuth.authc.http.enabled).toBe(false);
expect(loggingSystemMock.collect(logger).warn).toHaveLength(0);
});
it('should set xpack.security.secureCookies if SSL is configured', async () => {
const logger = loggingSystemMock.create().get();
const config = createConfig(ConfigSchema.validate({}), logger, { isTLSEnabled: true });


@ -226,6 +226,11 @@ export const ConfigSchema = schema.object({
}
},
}),
concurrentSessions: schema.maybe(
schema.object({
maxSessions: schema.number({ min: 1, max: 1000 }),
})
),
}),
secureCookies: schema.boolean({ defaultValue: false }),
sameSiteCookies: schema.maybe(
@ -400,6 +405,13 @@ export function createConfig(
},
} as AppenderConfigType);
const session = getSessionConfig(config.session, providers);
if (session.concurrentSessions?.maxSessions != null && config.authc.http.enabled) {
logger.warn(
'Both concurrent user sessions limit and HTTP authentication are configured. The limit does not apply to HTTP authentication.'
);
}
return {
...config,
audit: {
@ -412,7 +424,7 @@ export function createConfig(
sortedProviders: Object.freeze(sortedProviders),
http: config.authc.http,
},
session: getSessionConfig(config.session, providers),
session,
encryptionKey,
secureCookies,
};
@ -420,6 +432,7 @@ export function createConfig(
function getSessionConfig(session: RawConfigType['session'], providers: ProvidersConfigType) {
return {
concurrentSessions: session.concurrentSessions,
cleanupInterval: session.cleanupInterval,
getExpirationTimeouts(provider: AuthenticationProvider | undefined) {
// Both idle timeout and lifespan from the provider specific session config can have three
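
For context, here is a minimal validation sketch (an illustration, not part of this commit) of how the new `xpack.security.session.concurrentSessions.maxSessions` setting flows through the `ConfigSchema` and `createConfig` exports changed above. The mock logger and the relative import path are assumptions borrowed from the test files in this diff.

import { loggingSystemMock } from '@kbn/core/server/mocks';
import { ConfigSchema, createConfig } from '../config'; // illustrative path, as used in the tests above

// Raw settings (e.g. from kibana.yml) are validated against the schema; values outside
// the 1..1000 range are rejected by `schema.number({ min: 1, max: 1000 })`.
const rawConfig = ConfigSchema.validate({
  session: { concurrentSessions: { maxSessions: 3 } },
  // Disabling HTTP authentication avoids the warning logged by `createConfig`,
  // since the concurrent sessions limit does not apply to HTTP authentication.
  authc: { http: { enabled: false } },
});

const config = createConfig(rawConfig, loggingSystemMock.createLogger(), { isTLSEnabled: true });
// The validated limit is exposed on the resulting session config.
console.log(config.session.concurrentSessions?.maxSessions); // 3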


@ -370,7 +370,7 @@ export class SecurityPlugin
const clusterClient = core.elasticsearch.client;
const { watchOnlineStatus$ } = this.elasticsearchService.start();
const { session } = this.sessionManagementService.start({
auditLogger: this.auditSetup!.withoutRequest,
audit: this.auditSetup!,
elasticsearchClient: clusterClient.asInternalUser,
kibanaIndexName: this.getKibanaIndexName(),
online$: watchOnlineStatus$(),


@ -11,7 +11,10 @@ import crypto from 'crypto';
import { httpServerMock, loggingSystemMock } from '@kbn/core/server/mocks';
import type { PublicMethodsOf } from '@kbn/utility-types';
import type { AuditLogger } from '..';
import { mockAuthenticatedUser } from '../../common/model/authenticated_user.mock';
import { userSessionConcurrentLimitLogoutEvent } from '../audit';
import { auditLoggerMock, auditServiceMock } from '../audit/mocks';
import { ConfigSchema, createConfig } from '../config';
import { sessionCookieMock, sessionIndexMock, sessionMock } from './index.mock';
import { getPrintableSessionId, Session, type SessionValueContentToEncrypt } from './session';
@ -27,6 +30,7 @@ describe('Session', () => {
let mockSessionIndex: jest.Mocked<PublicMethodsOf<SessionIndex>>;
let mockSessionCookie: jest.Mocked<PublicMethodsOf<SessionCookie>>;
let mockScopedAuditLogger: jest.Mocked<AuditLogger>;
let session: Session;
beforeEach(() => {
jest.spyOn(Date, 'now').mockImplementation(() => now);
@ -43,6 +47,10 @@ describe('Session', () => {
mockSessionCookie = sessionCookieMock.create();
mockSessionIndex = sessionIndexMock.create();
mockScopedAuditLogger = auditLoggerMock.create();
const mockAuditServiceSetup = auditServiceMock.create();
mockAuditServiceSetup.asScoped.mockReturnValue(mockScopedAuditLogger);
session = new Session({
logger: loggingSystemMock.createLogger(),
@ -56,6 +64,7 @@ describe('Session', () => {
),
sessionCookie: mockSessionCookie,
sessionIndex: mockSessionIndex,
audit: mockAuditServiceSetup,
});
});
@ -205,6 +214,40 @@ describe('Session', () => {
expect(mockSessionIndex.invalidate).toHaveBeenCalledTimes(1);
});
it('clears session value if the session is outside the concurrent session limit', async () => {
mockSessionCookie.get.mockResolvedValue(
sessionCookieMock.createValue({
aad: mockAAD,
idleTimeoutExpiration: now + 1,
lifespanExpiration: now + 1,
})
);
mockSessionIndex.get.mockResolvedValue(
sessionIndexMock.createValue({
content: await encryptContent(
{ username: 'some-user', state: 'some-state', userProfileId: 'uid' },
mockAAD
),
})
);
mockSessionIndex.isWithinConcurrentSessionLimit.mockResolvedValue(false);
await expect(session.get(httpServerMock.createKibanaRequest())).resolves.toEqual({
error: expect.any(SessionUnexpectedError),
value: null,
});
expect(mockSessionCookie.clear).toHaveBeenCalledTimes(1);
expect(mockSessionIndex.invalidate).toHaveBeenCalledTimes(1);
expect(mockScopedAuditLogger.log).toHaveBeenCalledTimes(1);
expect(mockScopedAuditLogger.log).toHaveBeenCalledWith(
userSessionConcurrentLimitLogoutEvent({
username: 'some-user',
userProfileId: 'uid',
provider: { name: 'basic1', type: 'basic' },
})
);
});
it('returns session value with decrypted content', async () => {
mockSessionCookie.get.mockResolvedValue(
sessionCookieMock.createValue({
@ -596,6 +639,7 @@ describe('Session', () => {
),
sessionCookie: mockSessionCookie,
sessionIndex: mockSessionIndex,
audit: auditServiceMock.create(),
});
const mockRequest = httpServerMock.createKibanaRequest();
@ -636,6 +680,7 @@ describe('Session', () => {
),
sessionCookie: mockSessionCookie,
sessionIndex: mockSessionIndex,
audit: auditServiceMock.create(),
});
const mockRequest = httpServerMock.createKibanaRequest();
@ -711,6 +756,7 @@ describe('Session', () => {
),
sessionCookie: mockSessionCookie,
sessionIndex: mockSessionIndex,
audit: auditServiceMock.create(),
});
const mockRequest = httpServerMock.createKibanaRequest();
@ -767,6 +813,7 @@ describe('Session', () => {
),
sessionCookie: mockSessionCookie,
sessionIndex: mockSessionIndex,
audit: auditServiceMock.create(),
});
});
@ -912,6 +959,7 @@ describe('Session', () => {
),
sessionCookie: mockSessionCookie,
sessionIndex: mockSessionIndex,
audit: auditServiceMock.create(),
});
const mockRequest = httpServerMock.createKibanaRequest();
@ -958,6 +1006,7 @@ describe('Session', () => {
),
sessionCookie: mockSessionCookie,
sessionIndex: mockSessionIndex,
audit: auditServiceMock.create(),
});
const mockRequest = httpServerMock.createKibanaRequest();


@ -13,7 +13,9 @@ import { promisify } from 'util';
import type { KibanaRequest, Logger } from '@kbn/core/server';
import type { PublicMethodsOf } from '@kbn/utility-types';
import type { AuditServiceSetup } from '..';
import type { AuthenticationProvider } from '../../common';
import { userSessionConcurrentLimitLogoutEvent } from '../audit';
import type { ConfigType } from '../config';
import type { SessionCookie } from './session_cookie';
import { SessionExpiredError, SessionMissingError, SessionUnexpectedError } from './session_errors';
@ -85,6 +87,7 @@ export interface SessionOptions {
readonly sessionIndex: PublicMethodsOf<SessionIndex>;
readonly sessionCookie: PublicMethodsOf<SessionCookie>;
readonly config: Pick<ConfigType, 'encryptionKey' | 'session'>;
readonly audit: AuditServiceSetup;
}
export interface SessionValueContentToEncrypt {
@ -194,6 +197,26 @@ export class Session {
return { error: new SessionUnexpectedError(), value: null };
}
// The only reason why we check if the session is within the concurrent session limit _after_ decryption
// is to record decrypted username and profile id in the audit logs.
const isSessionWithinConcurrentSessionLimit =
await this.options.sessionIndex.isWithinConcurrentSessionLimit(sessionIndexValue);
if (!isSessionWithinConcurrentSessionLimit) {
this.options.audit.asScoped(request).log(
userSessionConcurrentLimitLogoutEvent({
username: decryptedContent.username,
userProfileId: decryptedContent.userProfileId,
provider: sessionIndexValue.provider,
})
);
sessionLogger.warn(
'Session is outside the concurrent session limit and will be invalidated.'
);
await this.invalidate(request, { match: 'current' });
return { error: new SessionUnexpectedError(), value: null };
}
return {
error: null,
value: {


@ -17,6 +17,7 @@ export const sessionIndexMock = {
invalidate: jest.fn(),
initialize: jest.fn(),
cleanUp: jest.fn(),
isWithinConcurrentSessionLimit: jest.fn().mockResolvedValue(true),
}),
createValue: (sessionValue: Partial<SessionIndexValue> = {}): SessionIndexValue => ({


@ -10,6 +10,7 @@ import type {
BulkResponse,
ClosePointInTimeResponse,
DeleteByQueryResponse,
MsearchMultiSearchResult,
OpenPointInTimeResponse,
SearchResponse,
} from '@elastic/elasticsearch/lib/api/types';
@ -18,6 +19,7 @@ import { elasticsearchServiceMock, loggingSystemMock } from '@kbn/core/server/mo
import type { AuditLogger } from '../audit';
import { auditLoggerMock } from '../audit/mocks';
import { AnonymousAuthenticationProvider } from '../authentication';
import { ConfigSchema, createConfig } from '../config';
import { securityMock } from '../mocks';
import {
@ -36,20 +38,23 @@ describe('Session index', () => {
const indexName = '.kibana_some_tenant_security_session_1';
const aliasName = '.kibana_some_tenant_security_session';
const indexTemplateName = '.kibana_some_tenant_security_session_index_template_1';
const createSessionIndexOptions = (
config: Record<string, any> = { session: { idleTimeout: null, lifespan: null } }
) => ({
logger: loggingSystemMock.createLogger(),
kibanaIndexName: '.kibana_some_tenant',
config: createConfig(ConfigSchema.validate(config), loggingSystemMock.createLogger(), {
isTLSEnabled: false,
}),
elasticsearchClient: mockElasticsearchClient,
auditLogger,
});
beforeEach(() => {
mockElasticsearchClient = elasticsearchServiceMock.createElasticsearchClient();
auditLogger = auditLoggerMock.create();
sessionIndex = new SessionIndex({
logger: loggingSystemMock.createLogger(),
kibanaIndexName: '.kibana_some_tenant',
config: createConfig(
ConfigSchema.validate({ session: { idleTimeout: null, lifespan: null } }),
loggingSystemMock.createLogger(),
{ isTLSEnabled: false }
),
elasticsearchClient: mockElasticsearchClient,
auditLogger,
});
sessionIndex = new SessionIndex(createSessionIndexOptions());
});
describe('#initialize', () => {
@ -947,6 +952,318 @@ describe('Session index', () => {
})
);
});
describe('concurrent session limit', () => {
const expectedSearchParameters = () => ({
index: '.kibana_some_tenant_security_session',
query: {
bool: {
filter: [
{ exists: { field: 'usernameHash' } },
{
bool: {
must_not: [{ term: { 'provider.type': AnonymousAuthenticationProvider.type } }],
},
},
],
},
},
aggs: {
sessions_grouped_by_user: {
multi_terms: {
size: 10000,
terms: [
{ field: 'usernameHash' },
{ field: 'provider.type' },
{ field: 'provider.name' },
],
min_doc_count: 3,
},
},
},
size: 0,
filter_path: [
'aggregations.sessions_grouped_by_user.sum_other_doc_count',
'aggregations.sessions_grouped_by_user.buckets.key',
'aggregations.sessions_grouped_by_user.buckets.doc_count',
],
track_total_hits: false,
});
const expectedMultiSearchParameters = (
usernameHash: string,
providerType: string,
providerName: string
) => ({
query: {
bool: {
must: [
{ term: { usernameHash } },
{ term: { 'provider.type': providerType } },
{ term: { 'provider.name': providerName } },
],
},
},
sort: [{ createdAt: { order: 'desc' } }],
from: 2,
size: 9998,
_source: false,
track_total_hits: false,
});
beforeEach(() => {
// The first search call is used by the invalid/expired sessions cleanup routine.
mockElasticsearchClient.search.mockResolvedValueOnce({
hits: { hits: [] },
} as unknown as SearchResponse);
sessionIndex = new SessionIndex(
createSessionIndexOptions({
session: { idleTimeout: null, lifespan: null, concurrentSessions: { maxSessions: 2 } },
})
);
});
it('when concurrent session limit is not configured', async () => {
sessionIndex = new SessionIndex(createSessionIndexOptions());
await sessionIndex.cleanUp();
// Only search call for the invalid sessions (use `pit` as marker, since concurrent session limit cleanup
// routine doesn't rely on PIT).
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.search).toHaveBeenCalledWith(
expect.objectContaining({ pit: { id: 'PIT_ID', keep_alive: '5m' } })
);
expect(mockElasticsearchClient.msearch).not.toHaveBeenCalled();
expect(mockElasticsearchClient.bulk).not.toHaveBeenCalled();
expect(mockElasticsearchClient.indices.refresh).not.toHaveBeenCalled();
});
it('when the concurrent session limit is not exceeded', async () => {
mockElasticsearchClient.search.mockResolvedValueOnce({
aggregations: { sessions_grouped_by_user: { sum_other_doc_count: 1 } },
} as unknown as SearchResponse);
await sessionIndex.cleanUp();
// Only search call for the invalid sessions (use `pit` as marker, since concurrent session limit cleanup
// routine doesn't rely on PIT).
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(2);
expect(mockElasticsearchClient.search).toHaveBeenNthCalledWith(
2,
expectedSearchParameters()
);
expect(mockElasticsearchClient.msearch).not.toHaveBeenCalled();
expect(mockElasticsearchClient.bulk).not.toHaveBeenCalled();
expect(mockElasticsearchClient.indices.refresh).not.toHaveBeenCalled();
});
it('when the concurrent session limit is exceeded', async () => {
mockElasticsearchClient.search.mockResolvedValueOnce({
aggregations: {
sessions_grouped_by_user: {
sum_other_doc_count: 1,
buckets: [{ key: ['user-hash-name', 'basic', 'basic1'], doc_count: 10 }],
},
},
} as unknown as SearchResponse);
mockElasticsearchClient.msearch.mockResolvedValue({
responses: [{ status: 200, hits: { hits: [{ _id: 'some-id' }, { _id: 'some-id-2' }] } }],
} as MsearchMultiSearchResult);
await sessionIndex.cleanUp();
// Only search call for the invalid sessions (use `pit` as marker, since concurrent session limit cleanup
// routine doesn't rely on PIT).
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(2);
expect(mockElasticsearchClient.search).toHaveBeenNthCalledWith(
2,
expectedSearchParameters()
);
expect(mockElasticsearchClient.msearch).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.msearch).toHaveBeenCalledWith({
index: '.kibana_some_tenant_security_session',
searches: [{}, expectedMultiSearchParameters('user-hash-name', 'basic', 'basic1')],
filter_path: ['responses.status', 'responses.hits.hits._id'],
});
expect(mockElasticsearchClient.bulk).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.bulk).toHaveBeenCalledWith(
{
index: '.kibana_some_tenant_security_session',
operations: [{ delete: { _id: 'some-id' } }, { delete: { _id: 'some-id-2' } }],
refresh: false,
require_alias: true,
},
{ ignore: [409, 404] }
);
expect(mockElasticsearchClient.indices.refresh).toHaveBeenCalledTimes(1);
});
it('when the concurrent session limit is exceeded for multiple providers', async () => {
mockElasticsearchClient.search.mockResolvedValueOnce({
aggregations: {
sessions_grouped_by_user: {
sum_other_doc_count: 1,
buckets: [
{ key: ['user-hash-name', 'basic', 'basic1'], doc_count: 10 },
// For this we simulate a race condition, when the limit is exceeded during aggregation, but not during
// `msearch` query.
{ key: ['user-hash-name-2', 'basic', 'basic1'], doc_count: 1 },
{ key: ['user-hash-name-3', 'saml', 'saml1'], doc_count: 10 },
],
},
},
} as unknown as SearchResponse);
mockElasticsearchClient.msearch.mockResolvedValue({
responses: [
{ status: 200, hits: { hits: [{ _id: 'some-id' }, { _id: 'some-id-2' }] } },
{ status: 200 },
{ status: 200, hits: { hits: [{ _id: 'some-id-3' }] } },
],
} as MsearchMultiSearchResult);
await sessionIndex.cleanUp();
// Only search call for the invalid sessions (use `pit` as marker, since concurrent session limit cleanup
// routine doesn't rely on PIT).
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(2);
expect(mockElasticsearchClient.search).toHaveBeenNthCalledWith(
2,
expectedSearchParameters()
);
expect(mockElasticsearchClient.msearch).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.msearch).toHaveBeenCalledWith({
index: '.kibana_some_tenant_security_session',
searches: [
{},
expectedMultiSearchParameters('user-hash-name', 'basic', 'basic1'),
{},
expectedMultiSearchParameters('user-hash-name-2', 'basic', 'basic1'),
{},
expectedMultiSearchParameters('user-hash-name-3', 'saml', 'saml1'),
],
filter_path: ['responses.status', 'responses.hits.hits._id'],
});
expect(mockElasticsearchClient.bulk).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.bulk).toHaveBeenCalledWith(
{
index: '.kibana_some_tenant_security_session',
operations: [
{ delete: { _id: 'some-id' } },
{ delete: { _id: 'some-id-2' } },
{ delete: { _id: 'some-id-3' } },
],
refresh: false,
require_alias: true,
},
{ ignore: [409, 404] }
);
expect(mockElasticsearchClient.indices.refresh).toHaveBeenCalledTimes(1);
});
it('should call bulk_delete in multiple chunks if the total number of sessions to delete exceeds 10_000', async () => {
mockElasticsearchClient.search.mockResolvedValueOnce({
aggregations: {
sessions_grouped_by_user: {
sum_other_doc_count: 1,
buckets: [{ key: ['user-hash-name', 'basic', 'basic1'], doc_count: 10 }],
},
},
} as unknown as SearchResponse);
mockElasticsearchClient.msearch.mockResolvedValue({
responses: [
{
status: 200,
hits: {
hits: Array.from({ length: 10002 }).map((_, index) => ({
_id: `some-id-${index}`,
})),
},
},
],
} as MsearchMultiSearchResult);
await sessionIndex.cleanUp();
expect(mockElasticsearchClient.bulk).toHaveBeenCalledTimes(2);
expect(mockElasticsearchClient.bulk).toHaveBeenNthCalledWith(
1,
{
index: '.kibana_some_tenant_security_session',
operations: expect.arrayContaining([
{ delete: { _id: 'some-id-0' } },
{ delete: { _id: 'some-id-9999' } },
]),
refresh: false,
require_alias: true,
},
{ ignore: [409, 404] }
);
expect(mockElasticsearchClient.bulk).toHaveBeenNthCalledWith(
2,
{
index: '.kibana_some_tenant_security_session',
operations: [
{ delete: { _id: 'some-id-10000' } },
{ delete: { _id: 'some-id-10001' } },
],
refresh: false,
require_alias: true,
},
{ ignore: [409, 404] }
);
expect(mockElasticsearchClient.indices.refresh).toHaveBeenCalledTimes(1);
});
it('should log audit event', async () => {
mockElasticsearchClient.search.mockResolvedValueOnce({
aggregations: {
sessions_grouped_by_user: {
sum_other_doc_count: 1,
buckets: [
{ key: ['user-hash-name', 'basic', 'basic1'], doc_count: 3 },
{ key: ['user-hash-name-2', 'saml', 'saml1'], doc_count: 3 },
],
},
},
} as unknown as SearchResponse);
mockElasticsearchClient.msearch.mockResolvedValue({
responses: [
{ status: 200, hits: { hits: [{ _id: 'some-id' }] } },
{ status: 200, hits: { hits: [{ _id: 'some-id-2' }] } },
],
} as MsearchMultiSearchResult);
await sessionIndex.cleanUp();
expect(auditLogger.log).toHaveBeenCalledTimes(2);
expect(auditLogger.log).toHaveBeenCalledWith(
expect.objectContaining({
event: { action: 'session_cleanup', category: ['authentication'], outcome: 'unknown' },
user: { hash: 'user-hash-name' },
kibana: {
session_id: 'some-id',
authentication_provider: 'basic1',
authentication_type: 'basic',
},
})
);
expect(auditLogger.log).toHaveBeenCalledWith(
expect.objectContaining({
event: { action: 'session_cleanup', category: ['authentication'], outcome: 'unknown' },
user: { hash: 'user-hash-name-2' },
kibana: {
session_id: 'some-id-2',
authentication_provider: 'saml1',
authentication_type: 'saml',
},
})
);
});
});
});
describe('#get', () => {
@ -1480,4 +1797,167 @@ describe('Session index', () => {
});
});
});
describe('#isWithinConcurrentSessionLimit', () => {
const expectedSearchParameters = () => ({
index: '.kibana_some_tenant_security_session',
query: {
bool: {
filter: [
{ term: { usernameHash: 'some-username-hash' } },
{ term: { 'provider.type': 'basic' } },
{ term: { 'provider.name': 'basic1' } },
],
},
},
sort: [{ createdAt: { order: 'desc' } }],
_source_includes: 'createdAt',
filter_path: 'hits.hits._source',
from: 2,
size: 1,
track_total_hits: false,
});
beforeEach(() => {
sessionIndex = new SessionIndex(
createSessionIndexOptions({
session: { idleTimeout: null, lifespan: null, concurrentSessions: { maxSessions: 2 } },
})
);
});
it('throws if call to Elasticsearch fails', async () => {
const failureReason = new errors.ResponseError(
securityMock.createApiResponse(securityMock.createApiResponse({ body: { type: 'Uh oh.' } }))
);
mockElasticsearchClient.search.mockRejectedValue(failureReason);
await expect(
sessionIndex.isWithinConcurrentSessionLimit(sessionIndexMock.createValue())
).rejects.toBe(failureReason);
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.search).toHaveBeenCalledWith(expectedSearchParameters());
});
it('returns `true` if concurrent session limit is not configured', async () => {
sessionIndex = new SessionIndex(createSessionIndexOptions());
await expect(
sessionIndex.isWithinConcurrentSessionLimit(sessionIndexMock.createValue())
).resolves.toBe(true);
expect(mockElasticsearchClient.search).not.toHaveBeenCalled();
});
it('returns `true` for unauthenticated sessions', async () => {
await expect(
sessionIndex.isWithinConcurrentSessionLimit(
sessionIndexMock.createValue({ usernameHash: undefined })
)
).resolves.toBe(true);
expect(mockElasticsearchClient.search).not.toHaveBeenCalled();
});
it('returns `true` if session belongs to the anonymous user', async () => {
await expect(
sessionIndex.isWithinConcurrentSessionLimit(
sessionIndexMock.createValue({
createdAt: 100,
provider: { type: AnonymousAuthenticationProvider.type, name: 'anonymous1' },
})
)
).resolves.toBe(true);
expect(mockElasticsearchClient.search).not.toHaveBeenCalled();
});
it('returns `true` if session is within limit', async () => {
for (const value of [
{} as SearchResponse,
{ hits: { hits: [] } } as unknown as SearchResponse,
]) {
mockElasticsearchClient.search.mockResolvedValue(value);
await expect(
sessionIndex.isWithinConcurrentSessionLimit(sessionIndexMock.createValue())
).resolves.toBe(true);
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.search).toHaveBeenCalledWith(expectedSearchParameters());
mockElasticsearchClient.search.mockClear();
}
});
it('returns `true` if the specified session is not a legacy session, but the first session that is outside the limit is a legacy one', async () => {
mockElasticsearchClient.search.mockResolvedValue({
hits: { hits: [{ _source: {} }] },
} as SearchResponse);
await expect(
sessionIndex.isWithinConcurrentSessionLimit(sessionIndexMock.createValue())
).resolves.toBe(true);
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.search).toHaveBeenCalledWith(expectedSearchParameters());
});
it('returns `true` if the first session that is outside the limit is older than the specified session', async () => {
mockElasticsearchClient.search.mockResolvedValue({
hits: { hits: [{ _source: { createdAt: 100 } }] },
} as SearchResponse);
await expect(
sessionIndex.isWithinConcurrentSessionLimit(
sessionIndexMock.createValue({ createdAt: 200 })
)
).resolves.toBe(true);
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.search).toHaveBeenCalledWith(expectedSearchParameters());
});
it('returns `false` if the limit is exceeded and specified session is a legacy session', async () => {
mockElasticsearchClient.search.mockResolvedValue({
hits: { hits: [{ _source: { createdAt: 100 } }] },
} as SearchResponse);
await expect(
sessionIndex.isWithinConcurrentSessionLimit(
sessionIndexMock.createValue({ createdAt: undefined })
)
).resolves.toBe(false);
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.search).toHaveBeenCalledWith(expectedSearchParameters());
});
it('returns `false` if the first session that is outside the limit was created at the same time as the specified session', async () => {
mockElasticsearchClient.search.mockResolvedValue({
hits: { hits: [{ _source: { createdAt: 100 } }] },
} as SearchResponse);
await expect(
sessionIndex.isWithinConcurrentSessionLimit(
sessionIndexMock.createValue({ createdAt: 100 })
)
).resolves.toBe(false);
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.search).toHaveBeenCalledWith(expectedSearchParameters());
});
it('returns `false` if the specified session is older than the first session that is outside the limit', async () => {
mockElasticsearchClient.search.mockResolvedValue({
hits: { hits: [{ _source: { createdAt: 200 } }] },
} as SearchResponse);
await expect(
sessionIndex.isWithinConcurrentSessionLimit(
sessionIndexMock.createValue({ createdAt: 100 })
)
).resolves.toBe(false);
expect(mockElasticsearchClient.search).toHaveBeenCalledTimes(1);
expect(mockElasticsearchClient.search).toHaveBeenCalledWith(expectedSearchParameters());
});
});
});


@ -5,7 +5,14 @@
* 2.0.
*/
import type { CreateRequest, IndicesCreateRequest } from '@elastic/elasticsearch/lib/api/types';
import type {
AggregateName,
AggregationsMultiTermsAggregate,
CreateRequest,
IndicesCreateRequest,
MsearchRequestItem,
SearchHit,
} from '@elastic/elasticsearch/lib/api/types';
import type {
BulkOperationContainer,
SortResults,
@ -16,8 +23,10 @@ import type { ElasticsearchClient, Logger } from '@kbn/core/server';
import type { AuthenticationProvider } from '../../common/model';
import type { AuditLogger } from '../audit';
import { sessionCleanupEvent } from '../audit';
import { sessionCleanupConcurrentLimitEvent, sessionCleanupEvent } from '../audit';
import { AnonymousAuthenticationProvider } from '../authentication';
import type { ConfigType } from '../config';
import { getDetailedErrorMessage } from '../errors';
export interface SessionIndexOptions {
readonly elasticsearchClient: ElasticsearchClient;
@ -166,6 +175,11 @@ export interface SessionIndexValue {
metadata: SessionIndexValueMetadata;
}
/**
* Subset of the `SessionIndexValue` fields required for session cleanup.
*/
type SessionIndexValueDescriptor = Pick<SessionIndexValue, 'sid' | 'usernameHash' | 'provider'>;
/**
* Additional index specific information about the session value.
*/
@ -453,71 +467,153 @@ export class SessionIndex {
* Trigger a removal of any outdated session values.
*/
async cleanUp() {
const { auditLogger, elasticsearchClient, logger } = this.options;
logger.debug(`Running cleanup routine.`);
const { auditLogger, logger } = this.options;
logger.debug('Running cleanup routine.');
let error: Error | undefined;
let indexNeedsRefresh = false;
try {
for await (const sessionValues of this.getSessionValuesInBatches()) {
const operations: Array<Required<Pick<BulkOperationContainer, 'delete'>>> = [];
sessionValues.forEach(({ _id, _source }) => {
const operations = sessionValues.map(({ _id, _source }) => {
const { usernameHash, provider } = _source!;
auditLogger.log(sessionCleanupEvent({ sessionId: _id, usernameHash, provider }));
operations.push({ delete: { _id } });
return { delete: { _id } };
});
if (operations.length > 0) {
const bulkResponse = await elasticsearchClient.bulk(
{
index: this.aliasName,
operations,
refresh: false,
// delete operations do not respect `require_alias`, but we include it here for consistency.
require_alias: true,
},
{ ignore: [409, 404] }
);
if (bulkResponse.errors) {
const errorCount = bulkResponse.items.reduce(
(count, item) => (item.delete!.error ? count + 1 : count),
0
);
if (errorCount < bulkResponse.items.length) {
logger.warn(
`Failed to clean up ${errorCount} of ${bulkResponse.items.length} invalid or expired sessions. The remaining sessions were cleaned up successfully.`
);
indexNeedsRefresh = true;
} else {
logger.error(
`Failed to clean up ${bulkResponse.items.length} invalid or expired sessions.`
);
}
} else {
logger.debug(`Cleaned up ${bulkResponse.items.length} invalid or expired sessions.`);
indexNeedsRefresh = true;
}
}
indexNeedsRefresh = (await this.bulkDeleteSessions(operations)) || indexNeedsRefresh;
}
} catch (err) {
logger.error(`Failed to clean up sessions: ${err.message}`);
error = err;
}
// Only refresh the index if we have actually deleted one or more sessions. The index will auto-refresh eventually anyway, this just
// ensures that searches after the cleanup process are accurate, and this only impacts integration tests.
if (indexNeedsRefresh) {
// Only refresh the index if we have actually deleted one or more sessions. The index will auto-refresh eventually anyway, this just
// ensures that searches after the cleanup process are accurate, and this only impacts integration tests.
try {
await elasticsearchClient.indices.refresh({ index: this.aliasName });
logger.debug(`Refreshed session index.`);
} catch (err) {
logger.error(`Failed to refresh session index: ${err.message}`);
await this.refreshSessionIndex();
}
// Once index refresh is complete we can check if there are sessions left that exceed concurrent sessions limit.
try {
indexNeedsRefresh = false;
const operations = (await this.getSessionsOutsideConcurrentSessionLimit()).map((session) => {
auditLogger.log(
sessionCleanupConcurrentLimitEvent({
sessionId: session.sid,
usernameHash: session.usernameHash,
provider: session.provider,
})
);
return { delete: { _id: session.sid } };
});
if (operations.length > 0) {
// Limit max number of documents to delete to 10_000 to avoid massively large delete request payloads (10k batch
// delete request payload is about 700kb).
const batchSize = SESSION_INDEX_CLEANUP_BATCH_SIZE;
for (let i = 0; i < operations.length; i += batchSize) {
indexNeedsRefresh =
(await this.bulkDeleteSessions(operations.slice(i, i + batchSize))) ||
indexNeedsRefresh;
}
}
} catch (err) {
logger.error(
`Failed to clean up sessions that exceeded concurrent sessions limit: ${err.message}`
);
error = err;
}
if (indexNeedsRefresh) {
await this.refreshSessionIndex();
}
if (error) {
logger.error(`Cleanup routine failed: ${getDetailedErrorMessage(error)}.`);
// If we couldn't fetch or delete sessions, throw an error so the task will be retried.
throw error;
}
logger.debug('Cleanup routine successfully completed.');
}
/**
* Checks whether specific session is within a concurrent sessions limit.
* @param sessionValue Session index value to check against concurrent sessions limit.
*/
async isWithinConcurrentSessionLimit(sessionValue: Readonly<SessionIndexValue>) {
// Concurrent user sessions limit doesn't apply if it's not configured, or session isn't authenticated, or session
// belongs to the anonymous user.
const maxConcurrentSessions = this.options.config.session.concurrentSessions?.maxSessions;
if (
maxConcurrentSessions == null ||
!sessionValue.usernameHash ||
sessionValue.provider.type === AnonymousAuthenticationProvider.type
) {
return true;
}
let sessionsOutsideLimit: Array<SearchHit<SessionIndexValue>>;
try {
const searchResponse = await this.options.elasticsearchClient.search<SessionIndexValue>({
index: this.aliasName,
// Find all sessions created for the same user by the same authentication provider.
query: {
bool: {
filter: [
{ term: { usernameHash: sessionValue.usernameHash } },
{ term: { 'provider.type': sessionValue.provider.type } },
{ term: { 'provider.name': sessionValue.provider.name } },
],
},
},
// Sort sessions by creation date in descending order to get the most recent session that's also outside the
// limit. This query relies on a default value for `missing` sort parameter which is `_last`, meaning that
// sessions without `createdAt` field ("legacy" sessions) are always considered older than the ones that have
// this field populated. For example, if the limit is 2 the resulting set might look like this:
// { createdAt: 3 } <-- the most recent session (within the limit, not returned because of `from`)
// { createdAt: 2 } <-- the second most recent session (within the limit, not returned because of `from`)
// { createdAt: 1 } <-- the third most recent session (outside the limit, *returned*)
// { createdAt: undefined } <--- the oldest "legacy" session (outside the limit, not returned because of `size`)
sort: [{ createdAt: { order: 'desc' } }],
// Improve performance by fetching just one field of one outside-the-limit session and not tracking total hits.
_source_includes: 'createdAt',
filter_path: 'hits.hits._source',
from: maxConcurrentSessions,
size: 1,
track_total_hits: false,
});
sessionsOutsideLimit = searchResponse.hits?.hits ?? [];
} catch (err) {
this.options.logger.error(
`Failed to fetch user sessions to check concurrent sessions limit: ${err.message}.`
);
throw err;
}
// If all sessions are within the limit, then the provided one should be within the limit as well.
if (sessionsOutsideLimit.length === 0) {
return true;
}
// If there is any session that is outside the limit and the provided session is "legacy" session (doesn't have a
// `createdAt` field populated), then we can safely treat it as outside-the-limit session (all "legacy" sessions are
// treated equally).
if (!sessionValue.createdAt) {
return false;
}
// If the first outside-the-limit session doesn't have `createdAt` then all other sessions with `createdAt` are
// within the limit, otherwise the specified session is outside the limit only if it was created before or at the
// same time as the first outside-the-limit session.
const [{ _source: sessionOutsideLimit }] = sessionsOutsideLimit;
return (
!sessionOutsideLimit?.createdAt || sessionValue.createdAt > sessionOutsideLimit.createdAt
);
}
/**
@ -766,4 +862,231 @@ export class SessionIndex {
});
}
}
private async getSessionsOutsideConcurrentSessionLimit(): Promise<SessionIndexValueDescriptor[]> {
const maxConcurrentSessions = this.options.config.session.concurrentSessions?.maxSessions;
if (maxConcurrentSessions == null) {
return [];
}
// 1. We need to figure out what users have sessions that exceed the concurrent session limit. For that, we group
// existing sessions by username and authentication provider.
const aggResponse = await this.options.elasticsearchClient.search<
unknown,
Record<AggregateName, AggregationsMultiTermsAggregate>
>({
index: this.aliasName,
// Exclude unauthenticated sessions and sessions of the anonymous users that shouldn't be affected by the
// concurrent user sessions limit.
query: {
bool: {
filter: [
{ exists: { field: 'usernameHash' } },
{
bool: {
must_not: [{ term: { 'provider.type': AnonymousAuthenticationProvider.type } }],
},
},
],
},
},
aggs: {
sessions_grouped_by_user: {
multi_terms: {
// If we have more than 10_000 users that all exceeded the limit (highly unlikely), then the rest of the
// sessions will be cleaned up during the next run. It doesn't expose Kibana to any security risks since the
// concurrent sessions limits is enforced on fetch. The `size` is limited by `search.max_buckets` setting
// which is 65,536 by default, but we don't want to load Elasticsearch too much (response size for 10000
// buckets is around 1mb).
size: SESSION_INDEX_CLEANUP_BATCH_SIZE,
terms: [
{ field: 'usernameHash' },
{ field: 'provider.type' },
{ field: 'provider.name' },
],
// Return only those groups that exceed the limit.
min_doc_count: maxConcurrentSessions + 1,
},
},
},
// Improve performance by not tracking total hits, not returning hits themselves (size=0), and fetching only buckets keys.
size: 0,
filter_path: [
'aggregations.sessions_grouped_by_user.sum_other_doc_count',
'aggregations.sessions_grouped_by_user.buckets.key',
'aggregations.sessions_grouped_by_user.buckets.doc_count',
],
track_total_hits: false,
});
// The reason we check if buckets is an array is to narrow down the type of the response since ES can return buckets as
// either an array OR a dictionary (aggregation has keys configured for the different buckets, that's not the case here).
const sessionsGroupedByUser = aggResponse.aggregations?.sessions_grouped_by_user;
const sessionBuckets = sessionsGroupedByUser?.buckets ?? [];
if (!Array.isArray(sessionBuckets) || sessionBuckets.length === 0) {
return [];
}
// Log a warning if we didn't fetch buckets for all users that exceeded the limit.
const ungroupedSessions = sessionsGroupedByUser?.sum_other_doc_count ?? 0;
if (ungroupedSessions > 0) {
this.options.logger.warn(
`Unable to check if remaining ${ungroupedSessions} sessions exceed the concurrent session limit. Sessions will be checked during the next cleanup job run.`
);
}
// 2. Once we know what users within what authentication providers exceed the concurrent sessions limit, we can
// fetch specific sessions documents that are outside the limit.
const { sessionGroups, sessionQueries, skippedSessions } = sessionBuckets.reduce(
(result, sessionGroup) => {
// The keys are arrays of values ordered the same ways as expression in the terms parameter of the aggregation.
const [usernameHash, providerType, providerName] = sessionGroup.key as string[];
// Record a number of session documents that won't be included in the batch during this run.
if (sessionGroup.doc_count > SESSION_INDEX_CLEANUP_BATCH_SIZE) {
result.skippedSessions += sessionGroup.doc_count - SESSION_INDEX_CLEANUP_BATCH_SIZE;
}
result.sessionGroups.push({
usernameHash,
provider: { type: providerType, name: providerName },
});
result.sessionQueries.push(
{},
{
query: {
bool: {
must: [
{ term: { usernameHash } },
{ term: { 'provider.type': providerType } },
{ term: { 'provider.name': providerName } },
],
},
},
// Sort sessions by creation date in descending order to get the most recent session that's also outside the
// limit. Refer to comment in `isWithinConcurrentSessionLimit` for the explanation and example.
sort: [{ createdAt: { order: 'desc' } }],
// We only need to fetch sessions that exceed the limit.
from: maxConcurrentSessions,
size: SESSION_INDEX_CLEANUP_BATCH_SIZE - maxConcurrentSessions,
// Improve performance by not tracking total hits and not fetching _source since we already have all necessary
// data returned within aggregation buckets (`usernameHash` and `provider`).
_source: false,
track_total_hits: false,
}
);
return result;
},
{ sessionGroups: [], sessionQueries: [], skippedSessions: 0 } as {
sessionGroups: Array<Pick<SessionIndexValue, 'usernameHash' | 'provider'>>;
sessionQueries: MsearchRequestItem[];
skippedSessions: number;
}
);
// Log a warning if we didn't fetch all sessions that exceeded the limit.
if (skippedSessions > 0) {
this.options.logger.warn(
`Unable to fetch ${skippedSessions} sessions that exceed the concurrent session limit. Sessions will be fetched and invalidated during the next cleanup job run.`
);
}
const { responses } = await this.options.elasticsearchClient.msearch({
index: this.aliasName,
searches: sessionQueries,
filter_path: ['responses.status', 'responses.hits.hits._id'],
});
const sessionValueDescriptors = responses.flatMap<SessionIndexValueDescriptor>(
(response, index) => {
if ('error' in response) {
this.options.logger.error(
`Failed to fetch sessions that exceed the concurrent session limit: ${
getDetailedErrorMessage(response.error) ??
response.error.reason ??
response.error.type
}.`
);
return [];
}
return response.hits?.hits?.map((hit) => ({ sid: hit._id, ...sessionGroups[index] })) ?? [];
}
);
this.options.logger.debug(
`Preparing to delete ${sessionValueDescriptors.length} sessions of ${sessionBuckets.length} unique users due to exceeded concurrent sessions limit.`
);
return sessionValueDescriptors;
}
/**
* Performs a bulk delete operation on the Kibana session index.
* @param deleteOperations Bulk delete operations.
* @returns Returns `true` if the bulk delete affected any session document.
*/
private async bulkDeleteSessions(
deleteOperations: Array<Required<Pick<BulkOperationContainer, 'delete'>>>
) {
if (deleteOperations.length === 0) {
return false;
}
const bulkResponse = await this.options.elasticsearchClient.bulk(
{
index: this.aliasName,
operations: deleteOperations,
refresh: false,
// delete operations do not respect `require_alias`, but we include it here for consistency.
require_alias: true,
},
{ ignore: [409, 404] }
);
if (!bulkResponse.errors) {
this.options.logger.debug(
`Cleaned up ${bulkResponse.items.length} invalid or expired sessions.`
);
return true;
}
const errorCount = bulkResponse.items.reduce(
(count, item) => (item.delete!.error ? count + 1 : count),
0
);
if (errorCount < bulkResponse.items.length) {
this.options.logger.warn(
`Failed to clean up ${errorCount} of ${bulkResponse.items.length} invalid or expired sessions. The remaining sessions were cleaned up successfully.`
);
return true;
}
this.options.logger.error(
`Failed to clean up ${bulkResponse.items.length} invalid or expired sessions.`
);
return false;
}
/**
* Refreshes Kibana session index. This is used as a part of the session index cleanup job only and hence doesn't
* throw even if the operation fails.
*/
private async refreshSessionIndex() {
try {
await this.options.elasticsearchClient.indices.refresh({ index: this.aliasName });
this.options.logger.debug(`Refreshed session index.`);
} catch (err) {
this.options.logger.error(`Failed to refresh session index: ${err.message}`);
}
}
}
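
As a reading aid, here is a condensed sketch (an illustration, not code from this commit) of the decision implemented by `isWithinConcurrentSessionLimit` above. The names `current` and `firstOutsideLimit` are hypothetical: `current` stands for the session value being checked, and `firstOutsideLimit` for the single hit returned by the `from: maxConcurrentSessions, size: 1` search.

// Returns `true` if `current` is within the concurrent session limit.
function isWithinLimit(
  current: { createdAt?: number },
  firstOutsideLimit?: { createdAt?: number }
): boolean {
  // No hits beyond the limit: every session, including the current one, is within it.
  if (!firstOutsideLimit) return true;
  // "Legacy" sessions (no `createdAt`) sort last and are always treated as outside the limit.
  if (!current.createdAt) return false;
  // Otherwise the current session is within the limit only if it was created strictly later than
  // the most recent outside-the-limit session (an outside-the-limit "legacy" hit is always older).
  return !firstOutsideLimit.createdAt || current.createdAt > firstOutsideLimit.createdAt;
}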


@ -15,8 +15,8 @@ import type {
import { taskManagerMock } from '@kbn/task-manager-plugin/server/mocks';
import { nextTick } from '@kbn/test-jest-helpers';
import type { AuditLogger } from '../audit';
import { auditLoggerMock } from '../audit/mocks';
import type { AuditServiceSetup } from '../audit';
import { auditServiceMock } from '../audit/mocks';
import { ConfigSchema, createConfig } from '../config';
import type { OnlineStatusRetryScheduler } from '../elasticsearch';
import { Session } from './session';
@ -34,10 +34,10 @@ mockSessionIndexCleanUp.mockResolvedValue();
describe('SessionManagementService', () => {
let service: SessionManagementService;
let auditLogger: AuditLogger;
let auditSetupMock: AuditServiceSetup;
beforeEach(() => {
service = new SessionManagementService(loggingSystemMock.createLogger());
auditLogger = auditLoggerMock.create();
auditSetupMock = auditServiceMock.create();
});
afterEach(() => {
@ -100,7 +100,7 @@ describe('SessionManagementService', () => {
const mockStatusSubject = new Subject<OnlineStatusRetryScheduler>();
expect(
service.start({
auditLogger,
audit: auditSetupMock,
elasticsearchClient: elasticsearchServiceMock.createElasticsearchClient(),
kibanaIndexName: '.kibana',
online$: mockStatusSubject.asObservable(),
@ -112,7 +112,7 @@ describe('SessionManagementService', () => {
it('registers proper session index cleanup task runner', async () => {
const mockStatusSubject = new Subject<OnlineStatusRetryScheduler>();
service.start({
auditLogger,
audit: auditSetupMock,
elasticsearchClient: elasticsearchServiceMock.createElasticsearchClient(),
kibanaIndexName: '.kibana',
online$: mockStatusSubject.asObservable(),
@ -132,7 +132,7 @@ describe('SessionManagementService', () => {
it('initializes session index and schedules session index cleanup task when Elasticsearch goes online', async () => {
const mockStatusSubject = new Subject<OnlineStatusRetryScheduler>();
service.start({
auditLogger,
audit: auditSetupMock,
elasticsearchClient: elasticsearchServiceMock.createElasticsearchClient(),
kibanaIndexName: '.kibana',
online$: mockStatusSubject.asObservable(),
@ -170,7 +170,7 @@ describe('SessionManagementService', () => {
it('removes old cleanup task if cleanup interval changes', async () => {
const mockStatusSubject = new Subject<OnlineStatusRetryScheduler>();
service.start({
auditLogger,
audit: auditSetupMock,
elasticsearchClient: elasticsearchServiceMock.createElasticsearchClient(),
kibanaIndexName: '.kibana',
online$: mockStatusSubject.asObservable(),
@ -206,7 +206,7 @@ describe('SessionManagementService', () => {
it('does not remove old cleanup task if cleanup interval does not change', async () => {
const mockStatusSubject = new Subject<OnlineStatusRetryScheduler>();
service.start({
auditLogger,
audit: auditSetupMock,
elasticsearchClient: elasticsearchServiceMock.createElasticsearchClient(),
kibanaIndexName: '.kibana',
online$: mockStatusSubject.asObservable(),
@ -233,7 +233,7 @@ describe('SessionManagementService', () => {
it('schedules retry if index initialization fails', async () => {
const mockStatusSubject = new Subject<OnlineStatusRetryScheduler>();
service.start({
auditLogger,
audit: auditSetupMock,
elasticsearchClient: elasticsearchServiceMock.createElasticsearchClient(),
kibanaIndexName: '.kibana',
online$: mockStatusSubject.asObservable(),
@ -270,7 +270,7 @@ describe('SessionManagementService', () => {
it('schedules retry if cleanup task registration fails', async () => {
const mockStatusSubject = new Subject<OnlineStatusRetryScheduler>();
service.start({
auditLogger,
audit: auditSetupMock,
elasticsearchClient: elasticsearchServiceMock.createElasticsearchClient(),
kibanaIndexName: '.kibana',
online$: mockStatusSubject.asObservable(),
@ -323,7 +323,7 @@ describe('SessionManagementService', () => {
it('properly unsubscribes from status updates', () => {
const mockStatusSubject = new Subject<OnlineStatusRetryScheduler>();
service.start({
auditLogger,
audit: auditSetupMock,
elasticsearchClient: elasticsearchServiceMock.createElasticsearchClient(),
kibanaIndexName: '.kibana',
online$: mockStatusSubject.asObservable(),


@ -14,7 +14,7 @@ import type {
TaskManagerStartContract,
} from '@kbn/task-manager-plugin/server';
import type { AuditLogger } from '../audit';
import type { AuditServiceSetup } from '../audit';
import type { ConfigType } from '../config';
import type { OnlineStatusRetryScheduler } from '../elasticsearch';
import { Session } from './session';
@ -32,7 +32,7 @@ export interface SessionManagementServiceStartParams {
readonly kibanaIndexName: string;
readonly online$: Observable<OnlineStatusRetryScheduler>;
readonly taskManager: TaskManagerStartContract;
readonly auditLogger: AuditLogger;
readonly audit: AuditServiceSetup;
}
export interface SessionManagementServiceStart {
@ -80,14 +80,14 @@ export class SessionManagementService {
kibanaIndexName,
online$,
taskManager,
auditLogger,
audit,
}: SessionManagementServiceStartParams): SessionManagementServiceStart {
this.sessionIndex = new SessionIndex({
config: this.config,
elasticsearchClient,
kibanaIndexName,
logger: this.logger.get('index'),
auditLogger,
auditLogger: audit.withoutRequest,
});
this.statusSubscription = online$.subscribe(async ({ scheduleRetry }) => {
@ -104,6 +104,7 @@ export class SessionManagementService {
sessionCookie: this.sessionCookie,
sessionIndex: this.sessionIndex,
config: this.config,
audit,
}),
};
}


@ -49,6 +49,7 @@ describe('Security UsageCollector', () => {
sessionIdleTimeoutInMinutes: 480,
sessionLifespanInMinutes: 43200,
sessionCleanupInMinutes: 60,
sessionConcurrentSessionsMaxSessions: 0,
anonymousCredentialType: undefined,
};
@ -110,6 +111,7 @@ describe('Security UsageCollector', () => {
sessionIdleTimeoutInMinutes: 0,
sessionLifespanInMinutes: 0,
sessionCleanupInMinutes: 0,
sessionConcurrentSessionsMaxSessions: 0,
anonymousCredentialType: undefined,
});
});
@ -476,10 +478,15 @@ describe('Security UsageCollector', () => {
describe('session', () => {
// Note: can't easily test deprecated 'sessionTimeout' value here because of the way that config deprecation renaming works
it('reports customized session idleTimeout, lifespan, and cleanupInterval', async () => {
it('reports customized session idleTimeout, lifespan, cleanupInterval, and max concurrent sessions', async () => {
const config = createSecurityConfig(
ConfigSchema.validate({
session: { idleTimeout: '123m', lifespan: '456m', cleanupInterval: '789m' },
session: {
idleTimeout: '123m',
lifespan: '456m',
cleanupInterval: '789m',
concurrentSessions: { maxSessions: 321 },
},
})
);
const usageCollection = usageCollectionPluginMock.createSetupContract();
@ -495,6 +502,7 @@ describe('Security UsageCollector', () => {
sessionIdleTimeoutInMinutes: 123,
sessionLifespanInMinutes: 456,
sessionCleanupInMinutes: 789,
sessionConcurrentSessionsMaxSessions: 321,
});
});
});


@ -20,6 +20,7 @@ interface Usage {
sessionIdleTimeoutInMinutes: number;
sessionLifespanInMinutes: number;
sessionCleanupInMinutes: number;
sessionConcurrentSessionsMaxSessions: number;
anonymousCredentialType: string | undefined;
}
@ -123,6 +124,12 @@ export function registerSecurityUsageCollector({ usageCollection, config, licens
'The session cleanup interval that is configured, in minutes (0 if disabled).',
},
},
sessionConcurrentSessionsMaxSessions: {
type: 'long',
_meta: {
description: 'The maximum number of the concurrent user sessions (0 if not configured).',
},
},
anonymousCredentialType: {
type: 'keyword',
_meta: {
@ -144,6 +151,7 @@ export function registerSecurityUsageCollector({ usageCollection, config, licens
sessionIdleTimeoutInMinutes: 0,
sessionLifespanInMinutes: 0,
sessionCleanupInMinutes: 0,
sessionConcurrentSessionsMaxSessions: 0,
anonymousCredentialType: undefined,
};
}
@ -172,6 +180,8 @@ export function registerSecurityUsageCollector({ usageCollection, config, licens
const sessionIdleTimeoutInMinutes = sessionExpirations.idleTimeout?.asMinutes() ?? 0;
const sessionLifespanInMinutes = sessionExpirations.lifespan?.asMinutes() ?? 0;
const sessionCleanupInMinutes = config.session.cleanupInterval?.asMinutes() ?? 0;
const sessionConcurrentSessionsMaxSessions =
config.session.concurrentSessions?.maxSessions ?? 0;
const anonProviders = config.authc.providers.anonymous ?? ({} as Record<string, any>);
const foundProvider = Object.entries(anonProviders).find(
@ -201,6 +211,7 @@ export function registerSecurityUsageCollector({ usageCollection, config, licens
sessionIdleTimeoutInMinutes,
sessionLifespanInMinutes,
sessionCleanupInMinutes,
sessionConcurrentSessionsMaxSessions,
anonymousCredentialType,
};
},

View file

@ -9534,6 +9534,12 @@
"description": "The session cleanup interval that is configured, in minutes (0 if disabled)."
}
},
"sessionConcurrentSessionsMaxSessions": {
"type": "long",
"_meta": {
"description": "The maximum number of the concurrent user sessions (0 if not configured)."
}
},
"anonymousCredentialType": {
"type": "keyword",
"_meta": {

View file

@ -0,0 +1,67 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { resolve } from 'path';
import { FtrConfigProviderContext } from '@kbn/test';
import { services } from './services';
// the default export of config files must be a config provider
// that returns an object with the project's config values
export default async function ({ readConfigFile }: FtrConfigProviderContext) {
const xPackAPITestsConfig = await readConfigFile(require.resolve('../api_integration/config.ts'));
const kibanaPort = xPackAPITestsConfig.get('servers.kibana.port');
const idpPath = resolve(__dirname, './fixtures/saml/idp_metadata.xml');
const testEndpointsPlugin = resolve(__dirname, '../security_functional/plugins/test_endpoints');
return {
testFiles: [resolve(__dirname, './tests/session_concurrent_limit')],
services,
servers: xPackAPITestsConfig.get('servers'),
esTestCluster: {
...xPackAPITestsConfig.get('esTestCluster'),
serverArgs: [
...xPackAPITestsConfig.get('esTestCluster.serverArgs'),
'xpack.security.authc.token.enabled=true',
'xpack.security.authc.realms.native.native1.order=0',
'xpack.security.authc.realms.saml.saml1.order=1',
`xpack.security.authc.realms.saml.saml1.idp.metadata.path=${idpPath}`,
'xpack.security.authc.realms.saml.saml1.idp.entity_id=http://www.elastic.co/saml1',
`xpack.security.authc.realms.saml.saml1.sp.entity_id=http://localhost:${kibanaPort}`,
`xpack.security.authc.realms.saml.saml1.sp.logout=http://localhost:${kibanaPort}/logout`,
`xpack.security.authc.realms.saml.saml1.sp.acs=http://localhost:${kibanaPort}/api/security/saml/callback`,
'xpack.security.authc.realms.saml.saml1.attributes.principal=urn:oid:0.0.7',
],
},
kbnTestServer: {
...xPackAPITestsConfig.get('kbnTestServer'),
serverArgs: [
...xPackAPITestsConfig.get('kbnTestServer.serverArgs'),
`--plugin-path=${testEndpointsPlugin}`,
'--xpack.security.session.concurrentSessions.maxSessions=2',
`--xpack.security.authc.providers=${JSON.stringify({
basic: { basic1: { order: 0 } },
saml: { saml1: { order: 1, realm: 'saml1' } },
anonymous: {
anonymous1: {
order: 3,
credentials: { username: 'anonymous_user', password: 'changeme' },
},
},
})}`,
// Exclude Uptime tasks so that they don't interfere (additional ES load) with the session cleanup task.
`--xpack.task_manager.unsafe.exclude_task_types=${JSON.stringify(['UPTIME:*'])}`,
],
},
junit: {
reportName: 'X-Pack Security API Integration Tests (Session Concurrent Limit)',
},
};
}

View file

@ -0,0 +1,385 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { parse as parseCookie, Cookie } from 'tough-cookie';
import { setTimeout as setTimeoutAsync } from 'timers/promises';
import expect from '@kbn/expect';
import { adminTestUser } from '@kbn/test';
import type { AuthenticationProvider } from '@kbn/security-plugin/common';
import {
AggregateName,
AggregationsMultiTermsAggregate,
AggregationsMultiTermsBucket,
AggregationsTopHitsAggregate,
SearchTotalHits,
} from '@elastic/elasticsearch/lib/api/types';
import { FtrProviderContext } from '../../ftr_provider_context';
import { getSAMLRequestId, getSAMLResponse } from '../../fixtures/saml/saml_tools';
export default function ({ getService }: FtrProviderContext) {
const supertest = getService('supertestWithoutAuth');
const es = getService('es');
const security = getService('security');
const esDeleteAllIndices = getService('esDeleteAllIndices');
const config = getService('config');
const log = getService('log');
const randomness = getService('randomness');
const testUser = { username: 'test_user', password: 'changeme' };
const basicProvider = { type: 'basic', name: 'basic1' };
const samlProvider = { type: 'saml', name: 'saml1' };
const anonymousProvider = { type: 'anonymous', name: 'anonymous1' };
const kibanaServerConfig = config.get('servers.kibana');
async function checkSessionCookie(
sessionCookie: Cookie,
username: string,
provider: AuthenticationProvider
) {
const apiResponse = await supertest
.get('/internal/security/me')
.set('kbn-xsrf', 'xxx')
.set('Cookie', sessionCookie.cookieString())
.expect(200);
expect(apiResponse.body.username).to.be(username);
expect(apiResponse.body.authentication_provider).to.eql(provider);
return Array.isArray(apiResponse.headers['set-cookie'])
? parseCookie(apiResponse.headers['set-cookie'][0])!
: undefined;
}
async function checkSessionCookieInvalid(sessionCookie: Cookie) {
await supertest
.get('/internal/security/me')
.set('kbn-xsrf', 'xxx')
.set('Cookie', sessionCookie.cookieString())
.expect(401);
}
async function getNumberOfSessionDocuments() {
await es.indices.refresh({ index: '.kibana_security_session*' });
const sessionDocuments = await es.search({ index: '.kibana_security_session*' });
log.debug(`Existing sessions: ${JSON.stringify(sessionDocuments.hits)}.`);
return (sessionDocuments.hits.total as SearchTotalHits).value;
}
async function loginWithBasic(credentials: { username: string; password: string }) {
const authenticationResponse = await supertest
.post('/internal/security/login')
.set('kbn-xsrf', 'xxx')
.send({
providerType: basicProvider.type,
providerName: basicProvider.name,
currentURL: '/',
params: credentials,
})
.expect(200);
return parseCookie(authenticationResponse.headers['set-cookie'][0])!;
}
async function startSAMLHandshake() {
const handshakeResponse = await supertest
.post('/internal/security/login')
.set('kbn-xsrf', 'xxx')
.send({ providerType: samlProvider.type, providerName: samlProvider.name, currentURL: '' })
.expect(200);
return {
cookie: parseCookie(handshakeResponse.headers['set-cookie'][0])!,
location: handshakeResponse.body.location,
};
}
async function finishSAMLHandshake(handshakeCookie: Cookie, handshakeLocation: string) {
const authenticationResponse = await supertest
.post('/api/security/saml/callback')
.set('kbn-xsrf', 'xxx')
.set('Cookie', handshakeCookie.cookieString())
.send({
SAMLResponse: await getSAMLResponse({
destination: `http://localhost:${kibanaServerConfig.port}/api/security/saml/callback`,
sessionIndex: String(randomness.naturalNumber()),
inResponseTo: await getSAMLRequestId(handshakeLocation),
}),
})
.expect(302);
return parseCookie(authenticationResponse.headers['set-cookie'][0])!;
}
async function loginWithSAML() {
const { cookie, location } = await startSAMLHandshake();
return finishSAMLHandshake(cookie, location);
}
async function loginWithAnonymous() {
const authenticationResponse = await supertest
.post('/internal/security/login')
.set('kbn-xsrf', 'xxx')
.send({
providerType: anonymousProvider.type,
providerName: anonymousProvider.name,
currentURL: '/',
})
.expect(200);
return parseCookie(authenticationResponse.headers['set-cookie'][0])!;
}
async function toggleSessionCleanupTask(enabled: boolean) {
await supertest
.post('/session/toggle_cleanup_task')
.set('kbn-xsrf', 'xxx')
.auth(adminTestUser.username, adminTestUser.password)
.send({ enabled })
.expect(200);
}
describe('Session Concurrent Limit cleanup', () => {
before(async () => {
await security.user.create('anonymous_user', {
password: 'changeme',
roles: [],
full_name: 'Guest',
});
});
after(async () => {
await security.user.delete('anonymous_user');
});
beforeEach(async function () {
this.timeout(120000);
await toggleSessionCleanupTask(false);
await es.cluster.health({ index: '.kibana_security_session*', wait_for_status: 'green' });
await esDeleteAllIndices('.kibana_security_session*');
});
it('should properly clean up sessions that exceeded concurrent session limit', async function () {
this.timeout(100000);
log.debug(`Log in as ${testUser.username} 3 times with a 0.5s delay.`);
const basicSessionCookieOne = await loginWithBasic(testUser);
await setTimeoutAsync(500);
const basicSessionCookieTwo = await loginWithBasic(testUser);
await setTimeoutAsync(500);
const basicSessionCookieThree = await loginWithBasic(testUser);
expect(await getNumberOfSessionDocuments()).to.be(3);
// Let's wait for 60s to make sure the cleanup routine runs after it is enabled.
log.debug('Waiting for cleanup job to run...');
await toggleSessionCleanupTask(true);
await setTimeoutAsync(60000);
// The oldest session should have been removed, but the rest should still be valid.
expect(await getNumberOfSessionDocuments()).to.be(2);
await checkSessionCookieInvalid(basicSessionCookieOne);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieThree, testUser.username, basicProvider);
});
it('should properly clean up sessions that exceeded concurrent session limit even for multiple providers', async function () {
this.timeout(100000);
log.debug(`Log in as ${testUser.username} and SAML user 3 times each with a 0.5s delay.`);
const basicSessionCookieOne = await loginWithBasic(testUser);
const samlSessionCookieOne = await loginWithSAML();
await setTimeoutAsync(500);
const basicSessionCookieTwo = await loginWithBasic(testUser);
const samlSessionCookieTwo = await loginWithSAML();
await setTimeoutAsync(500);
const basicSessionCookieThree = await loginWithBasic(testUser);
const samlSessionCookieThree = await loginWithSAML();
expect(await getNumberOfSessionDocuments()).to.be(6);
// Let's wait for 60s to make sure the cleanup routine runs after it is enabled.
log.debug('Waiting for cleanup job to run...');
await toggleSessionCleanupTask(true);
await setTimeoutAsync(60000);
// The oldest session for each provider should have been removed, but the rest should still be valid.
expect(await getNumberOfSessionDocuments()).to.be(4);
await checkSessionCookieInvalid(basicSessionCookieOne);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieThree, testUser.username, basicProvider);
await checkSessionCookieInvalid(samlSessionCookieOne);
await checkSessionCookie(samlSessionCookieTwo, 'a@b.c', samlProvider);
await checkSessionCookie(samlSessionCookieThree, 'a@b.c', samlProvider);
});
it('should properly clean up sessions that exceeded concurrent session limit when legacy sessions are present', async function () {
this.timeout(100000);
log.debug(`Log in as ${testUser.username} and SAML user 3 times each with a 0.5s delay.`);
const basicSessionCookieOne = await loginWithBasic(testUser);
const samlSessionCookieOne = await loginWithSAML();
await setTimeoutAsync(500);
const basicSessionCookieTwo = await loginWithBasic(testUser);
const samlSessionCookieTwo = await loginWithSAML();
await setTimeoutAsync(500);
const basicSessionCookieThree = await loginWithBasic(testUser);
const samlSessionCookieThree = await loginWithSAML();
expect(await getNumberOfSessionDocuments()).to.be(6);
// Remove the `createdAt` field from the most recent sessions to emulate legacy sessions.
// 1. Get the latest session for each unique user/provider pair.
const aggResponse = await es.search<
unknown,
Record<AggregateName, AggregationsMultiTermsAggregate>
>({
index: '.kibana_security_session*',
size: 0,
filter_path: 'aggregations.sessions.buckets.top.hits.hits._id',
aggs: {
sessions: {
multi_terms: { terms: [{ field: 'usernameHash' }, { field: 'provider.type' }] },
aggs: { top: { top_hits: { sort: [{ createdAt: { order: 'desc' } }], size: 1 } } },
},
},
});
// 2. Extract session IDs from the nested top_hits aggregation.
const sessionIds =
(aggResponse.aggregations?.sessions.buckets as AggregationsMultiTermsBucket[]).flatMap(
(bucket) => {
const sessionId = (bucket.top as AggregationsTopHitsAggregate).hits?.hits?.[0]?._id;
return sessionId ? [sessionId] : [];
}
) ?? [];
expect(sessionIds.length).to.be(2);
// 3. Remove the `createdAt` field from the latest sessions to emulate legacy sessions.
await es.updateByQuery({
index: '.kibana_security_session*',
body: { script: 'ctx._source.remove("createdAt")', query: { ids: { values: sessionIds } } },
refresh: true,
});
// Let's wait for 60s to make sure the cleanup routine runs after it is enabled.
log.debug('Waiting for cleanup job to run...');
await toggleSessionCleanupTask(true);
await setTimeoutAsync(60000);
// The sessions without `createdAt` (treated as legacy and hence the oldest) should have been removed, but the rest should still be valid.
expect(await getNumberOfSessionDocuments()).to.be(4);
await checkSessionCookie(basicSessionCookieOne, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookieInvalid(basicSessionCookieThree);
await checkSessionCookie(samlSessionCookieOne, 'a@b.c', samlProvider);
await checkSessionCookie(samlSessionCookieTwo, 'a@b.c', samlProvider);
await checkSessionCookieInvalid(samlSessionCookieThree);
});
it('should not clean up session if the limit is not exceeded', async function () {
this.timeout(100000);
log.debug(`Log in as ${testUser.username} 2 times with a 0.5s delay.`);
const basicSessionCookieOne = await loginWithBasic(testUser);
await setTimeoutAsync(500);
const basicSessionCookieTwo = await loginWithBasic(testUser);
expect(await getNumberOfSessionDocuments()).to.be(2);
// Let's wait for 60s to make sure the cleanup routine runs after it is enabled.
log.debug('Waiting for cleanup job to run...');
await toggleSessionCleanupTask(true);
await setTimeoutAsync(60000);
// No sessions should have been removed since the limit was not exceeded.
expect(await getNumberOfSessionDocuments()).to.be(2);
await checkSessionCookie(basicSessionCookieOne, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
});
it('should not clean up sessions of the anonymous users', async function () {
this.timeout(100000);
log.debug(`Log in as anonymous_user 3 times.`);
const anonymousSessionCookieOne = await loginWithAnonymous();
const anonymousSessionCookieTwo = await loginWithAnonymous();
const anonymousSessionCookieThree = await loginWithAnonymous();
expect(await getNumberOfSessionDocuments()).to.be(3);
// Let's wait for 60s to make sure the cleanup routine runs after it is enabled.
log.debug('Waiting for cleanup job to run...');
await toggleSessionCleanupTask(true);
await setTimeoutAsync(60000);
// No sessions should have been removed since anonymous sessions are not subject to the limit.
expect(await getNumberOfSessionDocuments()).to.be(3);
// All sessions should be active.
for (const anonymousSessionCookie of [
anonymousSessionCookieOne,
anonymousSessionCookieTwo,
anonymousSessionCookieThree,
]) {
await checkSessionCookie(anonymousSessionCookie, 'anonymous_user', anonymousProvider);
}
});
it('should not clean up unauthenticated sessions', async function () {
this.timeout(100000);
log.debug(`Starting SAML handshake 3 times.`);
const unauthenticatedSessionOne = await startSAMLHandshake();
const unauthenticatedSessionTwo = await startSAMLHandshake();
const unauthenticatedSessionThree = await startSAMLHandshake();
expect(await getNumberOfSessionDocuments()).to.be(3);
// Let's wait for 60s to make sure the cleanup routine runs after it is enabled.
log.debug('Waiting for cleanup job to run...');
await toggleSessionCleanupTask(true);
await setTimeoutAsync(60000);
// No sessions should have been removed since unauthenticated sessions are not subject to the limit.
expect(await getNumberOfSessionDocuments()).to.be(3);
// Finish the SAML handshakes (all should succeed since the limit isn't enforced at session creation time).
const samlSessionCookieOne = await finishSAMLHandshake(
unauthenticatedSessionOne.cookie,
unauthenticatedSessionOne.location
);
await setTimeoutAsync(500);
const samlSessionCookieTwo = await finishSAMLHandshake(
unauthenticatedSessionTwo.cookie,
unauthenticatedSessionTwo.location
);
await setTimeoutAsync(500);
const samlSessionCookieThree = await finishSAMLHandshake(
unauthenticatedSessionThree.cookie,
unauthenticatedSessionThree.location
);
// For authenticated sessions, the limit should be enforced.
await checkSessionCookieInvalid(samlSessionCookieOne);
await checkSessionCookie(samlSessionCookieTwo, 'a@b.c', samlProvider);
await checkSessionCookie(samlSessionCookieThree, 'a@b.c', samlProvider);
});
});
}
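
The tests above exercise the background cleanup path. As a rough orientation, here is a minimal TypeScript sketch of the pruning rule they imply, not the plugin's actual cleanup implementation: for each authenticated user/provider pair only the `maxSessions` newest sessions survive, sessions without `createdAt` (legacy sessions) sort as the oldest, and anonymous or unauthenticated sessions are left alone. The `SessionDoc` shape and the skip conditions are illustrative assumptions based on the fields queried above (`usernameHash`, `provider`, `createdAt`).

interface SessionDoc {
  sid: string;
  usernameHash?: string; // assumed absent for unauthenticated (in-progress handshake) sessions
  provider: { type: string; name: string };
  createdAt?: number; // epoch millis; assumed missing for legacy sessions
}

function sessionsToPrune(sessions: SessionDoc[], maxSessions: number): SessionDoc[] {
  // Group authenticated, non-anonymous sessions by user and provider.
  const groups = new Map<string, SessionDoc[]>();
  for (const session of sessions) {
    if (!session.usernameHash || session.provider.type === 'anonymous') {
      continue; // not subject to the concurrent session limit
    }
    const key = `${session.usernameHash}:${session.provider.type}:${session.provider.name}`;
    groups.set(key, [...(groups.get(key) ?? []), session]);
  }

  const toPrune: SessionDoc[] = [];
  for (const group of groups.values()) {
    if (group.length <= maxSessions) continue;
    // Newest first; sessions without `createdAt` are treated as the oldest.
    const sorted = [...group].sort((a, b) => (b.createdAt ?? 0) - (a.createdAt ?? 0));
    toPrune.push(...sorted.slice(maxSessions));
  }
  return toPrune;
}

With `maxSessions: 2`, as configured for this suite, three basic logins leave only the two newest sessions valid, which is exactly what the first test asserts.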

View file

@ -0,0 +1,267 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { parse as parseCookie, Cookie } from 'tough-cookie';
import expect from '@kbn/expect';
import { adminTestUser } from '@kbn/test';
import type { AuthenticationProvider } from '@kbn/security-plugin/common/model';
import { getSAMLRequestId, getSAMLResponse } from '../../fixtures/saml/saml_tools';
import { FtrProviderContext } from '../../ftr_provider_context';
export default function ({ getService }: FtrProviderContext) {
const supertest = getService('supertestWithoutAuth');
const es = getService('es');
const security = getService('security');
const config = getService('config');
const log = getService('log');
const randomness = getService('randomness');
const kibanaServerConfig = config.get('servers.kibana');
const testUser = { username: 'test_user', password: 'changeme' };
const basicProvider = { type: 'basic', name: 'basic1' };
const samlProvider = { type: 'saml', name: 'saml1' };
const anonymousProvider = { type: 'anonymous', name: 'anonymous1' };
async function checkSessionCookie(
sessionCookie: Cookie,
username: string,
provider: AuthenticationProvider
) {
const apiResponse = await supertest
.get('/internal/security/me')
.set('kbn-xsrf', 'xxx')
.set('Cookie', sessionCookie.cookieString())
.expect(200);
expect(apiResponse.body.username).to.be(username);
expect(apiResponse.body.authentication_provider).to.eql(provider);
return Array.isArray(apiResponse.headers['set-cookie'])
? parseCookie(apiResponse.headers['set-cookie'][0])!
: undefined;
}
async function checkSessionCookieInvalid(sessionCookie: Cookie) {
await supertest
.get('/internal/security/me')
.set('kbn-xsrf', 'xxx')
.set('Cookie', sessionCookie.cookieString())
.expect(401);
}
async function loginWithSAML() {
const handshakeResponse = await supertest
.post('/internal/security/login')
.set('kbn-xsrf', 'xxx')
.send({ providerType: samlProvider.type, providerName: samlProvider.name, currentURL: '' })
.expect(200);
const authenticationResponse = await supertest
.post('/api/security/saml/callback')
.set('kbn-xsrf', 'xxx')
.set('Cookie', parseCookie(handshakeResponse.headers['set-cookie'][0])!.cookieString())
.send({
SAMLResponse: await getSAMLResponse({
destination: `http://localhost:${kibanaServerConfig.port}/api/security/saml/callback`,
sessionIndex: String(randomness.naturalNumber()),
inResponseTo: await getSAMLRequestId(handshakeResponse.body.location),
}),
})
.expect(302);
return parseCookie(authenticationResponse.headers['set-cookie'][0])!;
}
async function loginWithBasic(credentials: { username: string; password: string }) {
const authenticationResponse = await supertest
.post('/internal/security/login')
.set('kbn-xsrf', 'xxx')
.send({
providerType: basicProvider.type,
providerName: basicProvider.name,
currentURL: '/',
params: credentials,
})
.expect(200);
return parseCookie(authenticationResponse.headers['set-cookie'][0])!;
}
async function loginWithAnonymous() {
const authenticationResponse = await supertest
.post('/internal/security/login')
.set('kbn-xsrf', 'xxx')
.send({
providerType: anonymousProvider.type,
providerName: anonymousProvider.name,
currentURL: '/',
})
.expect(200);
return parseCookie(authenticationResponse.headers['set-cookie'][0])!;
}
async function toggleSessionCleanupTask(enabled: boolean) {
await supertest
.post('/session/toggle_cleanup_task')
.set('kbn-xsrf', 'xxx')
.auth(adminTestUser.username, adminTestUser.password)
.send({ enabled })
.expect(200);
}
describe('Session Global Concurrent Limit', () => {
before(async function () {
this.timeout(120000);
// Disable the cleanup task so it doesn't interfere with the tests.
await toggleSessionCleanupTask(false);
await security.user.create('anonymous_user', {
password: 'changeme',
roles: [],
full_name: 'Guest',
});
});
after(async () => {
// Enable the cleanup task again.
await toggleSessionCleanupTask(true);
await security.user.delete('anonymous_user');
});
beforeEach(async () => {
await security.testUser.setRoles(['kibana_admin']);
await es.cluster.health({ index: '.kibana_security_session*', wait_for_status: 'green' });
await supertest
.post('/api/security/session/_invalidate')
.set('kbn-xsrf', 'xxx')
.auth(adminTestUser.username, adminTestUser.password)
.send({ match: 'all' })
.expect(200);
});
it('should properly enforce session limit with single provider', async function () {
const basicSessionCookieOne = await loginWithBasic(testUser);
await checkSessionCookie(basicSessionCookieOne, testUser.username, basicProvider);
const basicSessionCookieTwo = await loginWithBasic(testUser);
await checkSessionCookie(basicSessionCookieOne, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
// The oldest session should be displaced.
const basicSessionCookieThree = await loginWithBasic(testUser);
await checkSessionCookieInvalid(basicSessionCookieOne);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieThree, testUser.username, basicProvider);
// The next oldest session should be displaced as well.
const basicSessionCookieFour = await loginWithBasic(testUser);
await checkSessionCookieInvalid(basicSessionCookieTwo);
await checkSessionCookie(basicSessionCookieThree, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieFour, testUser.username, basicProvider);
});
it('should properly enforce session limit with single provider and multiple users', async function () {
const basicSessionCookieOne = await loginWithBasic(testUser);
const basicSessionCookieTwo = await loginWithBasic(testUser);
const basicSessionCookieThree = await loginWithBasic(adminTestUser);
const basicSessionCookieFour = await loginWithBasic(adminTestUser);
// All sessions should be active.
await checkSessionCookie(basicSessionCookieOne, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieThree, adminTestUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieFour, adminTestUser.username, basicProvider);
// The oldest session of the admin user should be displaced.
const basicSessionCookieFive = await loginWithBasic(adminTestUser);
await checkSessionCookie(basicSessionCookieOne, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookieInvalid(basicSessionCookieThree);
await checkSessionCookie(basicSessionCookieFour, adminTestUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieFive, adminTestUser.username, basicProvider);
// The next oldest session of the admin user should be displaced as well.
const basicSessionCookieSix = await loginWithBasic(adminTestUser);
await checkSessionCookie(basicSessionCookieOne, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookieInvalid(basicSessionCookieFour);
await checkSessionCookie(basicSessionCookieFive, adminTestUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieSix, adminTestUser.username, basicProvider);
// Only the oldest session of the ordinary user should be displaced.
const basicSessionCookieSeven = await loginWithBasic(testUser);
await checkSessionCookieInvalid(basicSessionCookieOne);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieFive, adminTestUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieSix, adminTestUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieSeven, testUser.username, basicProvider);
});
it('should properly enforce session limit even for multiple concurrent logins', async function () {
const basicSessionCookies = await Promise.all(
Array.from({ length: 10 }).map(() => loginWithBasic(testUser))
);
// Since the logins were concurrent, we cannot know their `createdAt` timestamps upfront
// and hence which specific sessions will fall outside the limit.
const statusCodes = [];
for (const basicSessionCookie of basicSessionCookies) {
const { statusCode } = await supertest
.get('/internal/security/me')
.set('kbn-xsrf', 'xxx')
.set('Cookie', basicSessionCookie.cookieString());
statusCodes.push(statusCode);
}
log.debug(`Collected status codes: ${JSON.stringify(statusCodes)}.`);
expect(statusCodes.filter((statusCode) => statusCode === 200)).to.have.length(2);
expect(statusCodes.filter((statusCode) => statusCode === 401)).to.have.length(8);
});
it('should properly enforce session limit with multiple providers', async function () {
const basicSessionCookieOne = await loginWithBasic(testUser);
const basicSessionCookieTwo = await loginWithBasic(testUser);
const samlSessionCookieOne = await loginWithSAML();
const samlSessionCookieTwo = await loginWithSAML();
// All sessions should be active.
await checkSessionCookie(basicSessionCookieOne, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookie(samlSessionCookieOne, 'a@b.c', samlProvider);
await checkSessionCookie(samlSessionCookieTwo, 'a@b.c', samlProvider);
// Exceed limit with SAML credentials, other sessions shouldn't be affected.
const samlSessionCookieThree = await loginWithSAML();
await checkSessionCookie(basicSessionCookieOne, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookieInvalid(samlSessionCookieOne);
await checkSessionCookie(samlSessionCookieTwo, 'a@b.c', samlProvider);
await checkSessionCookie(samlSessionCookieThree, 'a@b.c', samlProvider);
// Exceed limit with Basic credentials, other sessions shouldn't be affected.
const basicSessionCookieThree = await loginWithBasic(testUser);
await checkSessionCookieInvalid(basicSessionCookieOne);
await checkSessionCookie(basicSessionCookieTwo, testUser.username, basicProvider);
await checkSessionCookie(basicSessionCookieThree, testUser.username, basicProvider);
await checkSessionCookie(samlSessionCookieTwo, 'a@b.c', samlProvider);
await checkSessionCookie(samlSessionCookieThree, 'a@b.c', samlProvider);
});
it('should not enforce session limit for anonymous users', async function () {
// All sessions should be active.
for (const anonymousSessionCookie of [
await loginWithAnonymous(),
await loginWithAnonymous(),
await loginWithAnonymous(),
await loginWithAnonymous(),
]) {
await checkSessionCookie(anonymousSessionCookie, 'anonymous_user', anonymousProvider);
}
});
});
}
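
Unlike the cleanup suite, these tests cover enforcement at login time: once a user already holds `maxSessions` sessions for a given provider, creating another one displaces the oldest. A hedged sketch of that check follows; the `SessionDoc` shape mirrors the illustrative one used after the cleanup tests, and none of these names come from the plugin's API.

type SessionDoc = {
  usernameHash?: string;
  provider: { type: string; name: string };
  createdAt?: number;
};

// Returns the existing sessions that should be invalidated when `newSession` is created.
function sessionsDisplacedBy(
  newSession: SessionDoc,
  existing: SessionDoc[],
  maxSessions: number
): SessionDoc[] {
  if (!newSession.usernameHash || newSession.provider.type === 'anonymous') {
    return []; // anonymous and unauthenticated sessions are not limited
  }
  const sameOwner = existing.filter(
    (s) =>
      s.usernameHash === newSession.usernameHash &&
      s.provider.type === newSession.provider.type &&
      s.provider.name === newSession.provider.name
  );
  if (sameOwner.length < maxSessions) {
    return [];
  }
  // Oldest first; displace just enough sessions to stay within the limit.
  const sorted = [...sameOwner].sort((a, b) => (a.createdAt ?? 0) - (b.createdAt ?? 0));
  return sorted.slice(0, sameOwner.length - maxSessions + 1);
}

With `maxSessions=2`, a third login for the same user and provider displaces exactly one session, the oldest, while sessions belonging to other users or providers stay untouched, matching the assertions above.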

View file

@ -0,0 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { FtrProviderContext } from '../../ftr_provider_context';
export default function ({ loadTestFile }: FtrProviderContext) {
describe('security APIs - Session Concurrent Limit', function () {
loadTestFile(require.resolve('./cleanup'));
loadTestFile(require.resolve('./global_limit'));
});
}

View file

@ -3,7 +3,7 @@
"owner": { "name": "Platform Security", "githubTeam": "kibana-security" },
"version": "8.0.0",
"kibanaVersion": "kibana",
"requiredPlugins":["security"],
"requiredPlugins":["security", "taskManager"],
"server": true,
"ui": true
}

View file

@ -5,11 +5,25 @@
* 2.0.
*/
import { PluginInitializer, Plugin } from '@kbn/core/server';
import { PluginInitializer, Plugin, CoreSetup } from '@kbn/core/server';
import {
TaskManagerSetupContract,
TaskManagerStartContract,
} from '@kbn/task-manager-plugin/server';
import { initRoutes } from './init_routes';
export const plugin: PluginInitializer<void, void> = (initializerContext): Plugin => ({
setup: (core) => initRoutes(initializerContext, core),
export interface PluginSetupDependencies {
taskManager: TaskManagerSetupContract;
}
export interface PluginStartDependencies {
taskManager: TaskManagerStartContract;
}
export const plugin: PluginInitializer<void, void> = (
initializerContext
): Plugin<void, void, PluginSetupDependencies, PluginStartDependencies> => ({
setup: (core: CoreSetup<PluginStartDependencies>) => initRoutes(initializerContext, core),
start: () => {},
stop: () => {},
});

View file

@ -7,9 +7,20 @@
import { schema } from '@kbn/config-schema';
import { errors } from '@elastic/elasticsearch';
import { CoreSetup, PluginInitializerContext } from '@kbn/core/server';
import { CoreSetup, CoreStart, PluginInitializerContext } from '@kbn/core/server';
import type {
TaskManagerStartContract,
ConcreteTaskInstance,
BulkUpdateTaskResult,
} from '@kbn/task-manager-plugin/server';
import { PluginStartDependencies } from '.';
export function initRoutes(initializerContext: PluginInitializerContext, core: CoreSetup) {
export const SESSION_INDEX_CLEANUP_TASK_NAME = 'session_cleanup';
export function initRoutes(
initializerContext: PluginInitializerContext,
core: CoreSetup<PluginStartDependencies>
) {
const logger = initializerContext.logger.get();
const authenticationAppOptions = { simulateUnauthorized: false };
@ -96,4 +107,127 @@ export function initRoutes(initializerContext: PluginInitializerContext, core: C
}
}
);
async function waitUntilTaskIsIdle(taskManager: TaskManagerStartContract) {
logger.info(`Waiting until the session cleanup task is idle.`);
const RETRY_SCALE_DURATION = 1000;
let retriesElapsed = 0;
let taskInstance: ConcreteTaskInstance;
while (retriesElapsed < 15 /** max around ~100s **/) {
await new Promise((resolve) => setTimeout(resolve, retriesElapsed * RETRY_SCALE_DURATION));
try {
taskInstance = await taskManager.get(SESSION_INDEX_CLEANUP_TASK_NAME);
if (taskInstance.status === 'idle') {
logger.info(`Session cleanup task is in idle state: ${JSON.stringify(taskInstance)}.`);
return;
}
} catch (err) {
logger.error(`Failed to fetch task: ${err?.message || err}.`);
throw err;
}
if (++retriesElapsed < 15) {
logger.warn(
`Session cleanup task is NOT in idle state (waiting for ${
retriesElapsed * RETRY_SCALE_DURATION
}ms before retrying): ${JSON.stringify(taskInstance)}.`
);
} else {
logger.error(
`Failed to wait until the session cleanup task enters an idle state: ${JSON.stringify(
taskInstance
)}.`
);
}
}
}
async function refreshTaskManagerIndex(
enabled: boolean,
coreStart: CoreStart,
taskManager: TaskManagerStartContract
) {
// Refresh task manager index before trying to modify a task document.
// Might not be needed once https://github.com/elastic/kibana/pull/148985 is merged.
try {
logger.info(
`Refreshing task manager index (enabled: ${enabled}), current task: ${JSON.stringify(
await taskManager.get(SESSION_INDEX_CLEANUP_TASK_NAME)
)}...`
);
const refreshResult = await coreStart.elasticsearch.client.asInternalUser.indices.refresh({
index: '.kibana_task_manager',
expand_wildcards: 'all',
});
logger.info(
`Successfully refreshed task manager index (enabled: ${enabled}), refresh result: ${JSON.stringify(
refreshResult
)}, current task: ${JSON.stringify(
await taskManager.get(SESSION_INDEX_CLEANUP_TASK_NAME)
)}.`
);
} catch (err) {
logger.error(
`Failed to refresh task manager index (enabled: ${enabled}): ${err?.message || err}.`
);
}
}
router.post(
{
path: '/session/toggle_cleanup_task',
validate: { body: schema.object({ enabled: schema.boolean() }) },
},
async (context, request, response) => {
const [coreStart, { taskManager }] = await core.getStartServices();
logger.info(`Toggle session cleanup task (enabled: ${request.body.enabled}).`);
await refreshTaskManagerIndex(request.body.enabled, coreStart, taskManager);
let bulkEnableDisableResult: BulkUpdateTaskResult;
try {
if (request.body.enabled) {
logger.info(
`Going to enable the following task: ${JSON.stringify(
await taskManager.get(SESSION_INDEX_CLEANUP_TASK_NAME)
)}.`
);
bulkEnableDisableResult = await taskManager.bulkEnable(
[SESSION_INDEX_CLEANUP_TASK_NAME],
true /** runSoon **/
);
} else {
bulkEnableDisableResult = await taskManager.bulkDisable([
SESSION_INDEX_CLEANUP_TASK_NAME,
]);
}
await refreshTaskManagerIndex(request.body.enabled, coreStart, taskManager);
// Make sure that the task enters the idle state before acknowledging that the task was disabled.
if (!request.body.enabled) {
await waitUntilTaskIsIdle(taskManager);
}
} catch (err) {
logger.error(
`Failed to toggle session cleanup task (enabled: ${request.body.enabled}): ${
err?.message || err
}.`
);
throw err;
}
logger.info(
`Successfully toggled session cleanup task (enabled: ${
request.body.enabled
}, enable/disable response: ${JSON.stringify(bulkEnableDisableResult)}).`
);
return response.ok();
}
);
}

View file

@ -13,6 +13,7 @@
"kbn_references": [
"@kbn/core",
"@kbn/security-plugin",
"@kbn/task-manager-plugin",
"@kbn/config-schema",
]
}