Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 09:48:58 -04:00
[file_upload] add has_import_permission route (#95190)
* [file_upload] add has_import_permission route
* remove ml hasImportPermissions
* fix tsconfig path
* tslint
* review feedback
* make pipeline check optional since geojson upload does not use pipeline
* ts cleanup
* make geojson permission failure message actionable
* revert privilege change in functional test
* add global_index_pattern_management_all permission to functional test
* rename hasPipeline to checkHasManagePipeline
* add api integration test
* tslint
* revert change to es_search_source
* simplify error message when users can't create index pattern

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in: parent 9d8a2f183e, commit 81b46931f8
23 changed files with 314 additions and 56 deletions
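Before the per-file hunks, here is a minimal, self-contained TypeScript sketch of how consumers are expected to call the new start-contract method; the `FileUploadStart` interface and `canUploadGeoJson` helper below are illustrative stand-ins, while the parameter shape matches the maps and ML call sites in the diff.

```ts
// Hedged sketch: the parameter and return shapes mirror the FileUploadStartApi
// additions in this commit; the local interface is a stand-in so the snippet
// stays self-contained.
interface FileUploadStart {
  hasImportPermission(params: {
    checkCreateIndexPattern: boolean;
    checkHasManagePipeline: boolean;
    indexName?: string;
  }): Promise<boolean>;
}

// GeoJSON upload (maps) does not use an ingest pipeline, so it only checks the
// index pattern privilege; the ML file importer also checks manage_pipeline and
// the target index.
export async function canUploadGeoJson(fileUpload: FileUploadStart): Promise<boolean> {
  return await fileUpload.hasImportPermission({
    checkCreateIndexPattern: true,
    checkHasManagePipeline: false,
  });
}
```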
@@ -7,6 +7,10 @@
import { ES_FIELD_TYPES } from '../../../../src/plugins/data/common';

+export interface HasImportPermission {
+  hasImportPermission: boolean;
+}
+
export interface InputOverrides {
  [key: string]: string | undefined;
}
@@ -5,5 +5,6 @@
  "server": true,
  "ui": true,
  "requiredPlugins": ["data", "usageCollection"],
+  "optionalPlugins": ["security"],
  "requiredBundles": ["kibanaReact"]
}
@@ -8,12 +8,14 @@
import React from 'react';
import { FileUploadComponentProps, lazyLoadFileUploadModules } from '../lazy_load_bundle';
import type { IImporter, ImportFactoryOptions } from '../importer';
+import { HasImportPermission } from '../../common';

export interface FileUploadStartApi {
  getFileUploadComponent(): Promise<React.ComponentType<FileUploadComponentProps>>;
  importerFactory(format: string, options: ImportFactoryOptions): Promise<IImporter | undefined>;
  getMaxBytes(): number;
  getMaxBytesFormatted(): string;
+  hasImportPermission(params: HasImportPermissionParams): Promise<boolean>;
}

export async function getFileUploadComponent(): Promise<
@@ -30,3 +32,23 @@ export async function importerFactory(
  const fileUploadModules = await lazyLoadFileUploadModules();
  return fileUploadModules.importerFactory(format, options);
}
+
+interface HasImportPermissionParams {
+  checkCreateIndexPattern: boolean;
+  checkHasManagePipeline: boolean;
+  indexName?: string;
+}
+
+export async function hasImportPermission(params: HasImportPermissionParams): Promise<boolean> {
+  const fileUploadModules = await lazyLoadFileUploadModules();
+  try {
+    const resp = await fileUploadModules.getHttp().fetch<HasImportPermission>({
+      path: `/internal/file_upload/has_import_permission`,
+      method: 'GET',
+      query: { ...params },
+    });
+    return resp.hasImportPermission;
+  } catch (error) {
+    return false;
+  }
+}
@@ -8,7 +8,9 @@
import React from 'react';
import { FeatureCollection } from 'geojson';
import { IndexPattern } from 'src/plugins/data/public';
+import { HttpStart } from 'src/core/public';
import { IImporter, ImportFactoryOptions, ImportResults } from '../importer';
+import { getHttp } from '../kibana_services';

export interface FileUploadComponentProps {
  isIndexingTriggered: boolean;
@@ -27,6 +29,7 @@ let loadModulesPromise: Promise<LazyLoadedFileUploadModules>;
interface LazyLoadedFileUploadModules {
  JsonUploadAndParse: React.ComponentType<FileUploadComponentProps>;
  importerFactory: (format: string, options: ImportFactoryOptions) => IImporter | undefined;
+  getHttp: () => HttpStart;
}

export async function lazyLoadFileUploadModules(): Promise<LazyLoadedFileUploadModules> {
@@ -40,6 +43,7 @@ export async function lazyLoadFileUploadModules(): Promise<LazyLoadedFileUploadM
    resolve({
      JsonUploadAndParse,
      importerFactory,
+      getHttp,
    });
  });
  return loadModulesPromise;
@@ -6,7 +6,12 @@
 */

import { CoreStart, Plugin } from '../../../../src/core/public';
-import { FileUploadStartApi, getFileUploadComponent, importerFactory } from './api';
+import {
+  FileUploadStartApi,
+  getFileUploadComponent,
+  importerFactory,
+  hasImportPermission,
+} from './api';
import { setStartServices } from './kibana_services';
import { DataPublicPluginStart } from '../../../../src/plugins/data/public';
import { getMaxBytes, getMaxBytesFormatted } from './get_max_bytes';
@@ -37,6 +42,7 @@ export class FileUploadPlugin
      importerFactory,
      getMaxBytes,
      getMaxBytesFormatted,
+      hasImportPermission,
    };
  }
}
@@ -5,6 +5,8 @@
 * 2.0.
 */

+import { PluginInitializerContext } from '../../../../src/core/server';
import { FileUploadPlugin } from './plugin';

-export const plugin = () => new FileUploadPlugin();
+export const plugin = (initializerContext: PluginInitializerContext) =>
+  new FileUploadPlugin(initializerContext);
@@ -6,20 +6,27 @@
 */

import { i18n } from '@kbn/i18n';
-import { CoreSetup, CoreStart, Plugin } from 'src/core/server';
+import { CoreSetup, CoreStart, Logger, Plugin, PluginInitializerContext } from 'src/core/server';
import { schema } from '@kbn/config-schema';
import { fileUploadRoutes } from './routes';
import { initFileUploadTelemetry } from './telemetry';
import { UsageCollectionSetup } from '../../../../src/plugins/usage_collection/server';
import { UI_SETTING_MAX_FILE_SIZE, MAX_FILE_SIZE } from '../common';
+import { StartDeps } from './types';

interface SetupDeps {
  usageCollection: UsageCollectionSetup;
}

export class FileUploadPlugin implements Plugin {
-  async setup(coreSetup: CoreSetup, plugins: SetupDeps) {
-    fileUploadRoutes(coreSetup.http.createRouter());
+  private readonly _logger: Logger;
+
+  constructor(initializerContext: PluginInitializerContext) {
+    this._logger = initializerContext.logger.get();
+  }
+
+  async setup(coreSetup: CoreSetup<StartDeps, unknown>, plugins: SetupDeps) {
+    fileUploadRoutes(coreSetup, this._logger);

    coreSetup.uiSettings.register({
      [UI_SETTING_MAX_FILE_SIZE]: {
@@ -6,7 +6,8 @@
 */

import { schema } from '@kbn/config-schema';
-import { IRouter, IScopedClusterClient } from 'kibana/server';
+import { IScopedClusterClient } from 'kibana/server';
+import { CoreSetup, Logger } from 'src/core/server';
import {
  MAX_FILE_SIZE_BYTES,
  IngestPipelineWrapper,
@@ -20,6 +21,8 @@ import { importDataProvider } from './import_data';

import { updateTelemetry } from './telemetry';
import { analyzeFileQuerySchema, importFileBodySchema, importFileQuerySchema } from './schemas';
+import { CheckPrivilegesPayload } from '../../security/server';
+import { StartDeps } from './types';

function importData(
  client: IScopedClusterClient,
@@ -37,7 +40,55 @@
/**
 * Routes for the file upload.
 */
-export function fileUploadRoutes(router: IRouter) {
+export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logger: Logger) {
+  const router = coreSetup.http.createRouter();
+
+  router.get(
+    {
+      path: '/internal/file_upload/has_import_permission',
+      validate: {
+        query: schema.object({
+          indexName: schema.maybe(schema.string()),
+          checkCreateIndexPattern: schema.boolean(),
+          checkHasManagePipeline: schema.boolean(),
+        }),
+      },
+    },
+    async (context, request, response) => {
+      try {
+        const [, pluginsStart] = await coreSetup.getStartServices();
+        const { indexName, checkCreateIndexPattern, checkHasManagePipeline } = request.query;
+
+        const authorizationService = pluginsStart.security?.authz;
+        const requiresAuthz = authorizationService?.mode.useRbacForRequest(request) ?? false;
+
+        if (!authorizationService || !requiresAuthz) {
+          return response.ok({ body: { hasImportPermission: true } });
+        }
+
+        const checkPrivilegesPayload: CheckPrivilegesPayload = {
+          elasticsearch: {
+            cluster: checkHasManagePipeline ? ['manage_pipeline'] : [],
+            index: indexName ? { [indexName]: ['create', 'create_index'] } : {},
+          },
+        };
+        if (checkCreateIndexPattern) {
+          checkPrivilegesPayload.kibana = [
+            authorizationService.actions.savedObject.get('index-pattern', 'create'),
+          ];
+        }
+
+        const checkPrivileges = authorizationService.checkPrivilegesDynamicallyWithRequest(request);
+        const checkPrivilegesResp = await checkPrivileges(checkPrivilegesPayload);
+
+        return response.ok({ body: { hasImportPermission: checkPrivilegesResp.hasAllRequested } });
+      } catch (e) {
+        logger.warn(`Unable to check import permission, error: ${e.message}`);
+        return response.ok({ body: { hasImportPermission: false } });
+      }
+    }
+  );
+
  /**
   * @apiGroup FileDataVisualizer
   *
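In short, the handler above maps the three query flags onto an Elasticsearch/Kibana privilege check and answers `hasImportPermission: false` on any missing privilege or thrown error. A condensed, standalone sketch of that mapping follows; the `indexPatternCreateAction` parameter is a placeholder for `authorizationService.actions.savedObject.get('index-pattern', 'create')`.

```ts
// Hedged sketch of the privilege mapping performed by the route handler above.
interface HasImportPermissionQuery {
  indexName?: string;
  checkCreateIndexPattern: boolean;
  checkHasManagePipeline: boolean;
}

function buildCheckPrivilegesPayload(
  query: HasImportPermissionQuery,
  indexPatternCreateAction: string
) {
  return {
    elasticsearch: {
      // manage_pipeline is only required when the import will create an ingest
      // pipeline (GeoJSON upload skips it, the ML file importer requires it).
      cluster: query.checkHasManagePipeline ? ['manage_pipeline'] : [],
      // Index privileges are only checked when a target index name is supplied.
      index: query.indexName ? { [query.indexName]: ['create', 'create_index'] } : {},
    },
    // The Kibana saved-object privilege is only added when an index pattern
    // will be created as part of the import.
    ...(query.checkCreateIndexPattern ? { kibana: [indexPatternCreateAction] } : {}),
  };
}
```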
x-pack/plugins/file_upload/server/types.ts (new file, 12 lines)

@@ -0,0 +1,12 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { SecurityPluginStart } from '../../security/server';

export interface StartDeps {
  security?: SecurityPluginStart;
}
@@ -11,6 +11,7 @@
  "references": [
    { "path": "../../../src/core/tsconfig.json" },
    { "path": "../../../src/plugins/data/tsconfig.json" },
-    { "path": "../../../src/plugins/usage_collection/tsconfig.json" }
+    { "path": "../../../src/plugins/usage_collection/tsconfig.json" },
+    { "path": "../security/tsconfig.json" },
  ]
}
@@ -9,12 +9,24 @@ import { i18n } from '@kbn/i18n';
import React from 'react';
import { LayerWizard, RenderWizardArguments } from '../../layers/layer_wizard_registry';
import { ClientFileCreateSourceEditor, INDEX_SETUP_STEP_ID, INDEXING_STEP_ID } from './wizard';
+import { getFileUpload } from '../../../kibana_services';

export const uploadLayerWizardConfig: LayerWizard = {
  categories: [],
  description: i18n.translate('xpack.maps.fileUploadWizard.description', {
    defaultMessage: 'Index GeoJSON data in Elasticsearch',
  }),
+  disabledReason: i18n.translate('xpack.maps.fileUploadWizard.disabledDesc', {
+    defaultMessage:
+      'Unable to upload files, you are missing the Kibana privilege "Index Pattern Management".',
+  }),
+  getIsDisabled: async () => {
+    const hasImportPermission = await getFileUpload().hasImportPermission({
+      checkCreateIndexPattern: true,
+      checkHasManagePipeline: false,
+    });
+    return !hasImportPermission;
+  },
  icon: 'importAction',
  prerequisiteSteps: [
    {
@@ -15,7 +15,7 @@ import {
  DEFAULT_MAX_RESULT_WINDOW,
  SCALING_TYPES,
} from '../../../../common/constants';
-import { getFileUploadComponent } from '../../../kibana_services';
+import { getFileUpload } from '../../../kibana_services';
import { GeoJsonFileSource } from '../../sources/geojson_file_source';
import { VectorLayer } from '../../layers/vector_layer';
import { createDefaultLayerDescriptor } from '../../sources/es_search_source';
@@ -65,7 +65,7 @@ export class ClientFileCreateSourceEditor extends Component<RenderWizardArgument
  }

  async _loadFileUploadComponent() {
-    const fileUploadComponent = await getFileUploadComponent();
+    const fileUploadComponent = await getFileUpload().getFileUploadComponent();
    if (this._isMounted) {
      this.setState({ fileUploadComponent });
    }
@@ -26,9 +26,7 @@ export function setStartServices(core: CoreStart, plugins: MapsPluginStartDepend
export const getIndexPatternService = () => pluginsStart.data.indexPatterns;
export const getAutocompleteService = () => pluginsStart.data.autocomplete;
export const getInspector = () => pluginsStart.inspector;
-export const getFileUploadComponent = async () => {
-  return await pluginsStart.fileUpload.getFileUploadComponent();
-};
+export const getFileUpload = () => pluginsStart.fileUpload;
export const getUiSettings = () => coreStart.uiSettings;
export const getIsDarkMode = () => getUiSettings().get('theme:darkMode', false);
export const getIndexPatternSelectComponent = () => pluginsStart.data.ui.IndexPatternSelect;
@@ -20,13 +20,7 @@ import { FileCouldNotBeRead, FileTooLarge } from './file_error_callouts';
import { EditFlyout } from '../edit_flyout';
import { ExplanationFlyout } from '../explanation_flyout';
import { ImportView } from '../import_view';
-import {
-  DEFAULT_LINES_TO_SAMPLE,
-  readFile,
-  createUrlOverrides,
-  processResults,
-  hasImportPermission,
-} from '../utils';
+import { DEFAULT_LINES_TO_SAMPLE, readFile, createUrlOverrides, processResults } from '../utils';
+import { getFileUpload } from '../../../../util/dependency_cache';

import { MODE } from './constants';
@@ -67,7 +61,10 @@ export class FileDataVisualizerView extends Component {
    // check the user has the correct permission to import data.
    // note, calling hasImportPermission with no arguments just checks the
    // cluster privileges, the user will still need index privileges to create and ingest
-    const hasPermissionToImport = await hasImportPermission();
+    const hasPermissionToImport = await getFileUpload().hasImportPermission({
+      checkCreateIndexPattern: false,
+      checkHasManagePipeline: true,
+    });
    this.setState({ hasPermissionToImport });
  }
@@ -35,7 +35,6 @@ import {
import { ExperimentalBadge } from '../experimental_badge';
import { getIndexPatternNames, loadIndexPatterns } from '../../../../util/index_utils';
import { ml } from '../../../../services/ml_api_service';
-import { hasImportPermission } from '../utils';

const DEFAULT_TIME_FIELD = '@timestamp';
const DEFAULT_INDEX_SETTINGS = { number_of_shards: 1 };
@@ -124,7 +123,13 @@ export class ImportView extends Component {
      },
      async () => {
        // check to see if the user has permission to create and ingest data into the specified index
-        if ((await hasImportPermission(index)) === false) {
+        if (
+          (await getFileUpload().hasImportPermission({
+            checkCreateIndexPattern: createIndexPattern,
+            checkHasManagePipeline: true,
+            indexName: index,
+          })) === false
+        ) {
          errors.push(
            i18n.translate('xpack.ml.fileDatavisualizer.importView.importPermissionError', {
              defaultMessage:
@@ -5,10 +5,4 @@
 * 2.0.
 */

-export {
-  createUrlOverrides,
-  hasImportPermission,
-  processResults,
-  readFile,
-  DEFAULT_LINES_TO_SAMPLE,
-} from './utils';
+export { createUrlOverrides, processResults, readFile, DEFAULT_LINES_TO_SAMPLE } from './utils';
@@ -6,7 +6,6 @@
 */

import { isEqual } from 'lodash';
-import { ml } from '../../../../services/ml_api_service';
import { AnalysisResult, InputOverrides } from '../../../../../../../file_upload/common';
import { MB } from '../../../../../../../file_upload/public';

@@ -136,27 +135,3 @@ export function processResults({ results, overrides }: AnalysisResult) {
    linesToSample,
  };
}
-
-/**
- * A check for the minimum privileges needed to create and ingest data into an index.
- * If called with no indexName, the check will just look for the minimum cluster privileges.
- * @param {string} indexName
- * @returns {Promise<boolean>}
- */
-export async function hasImportPermission(indexName: string) {
-  const priv: { cluster: string[]; index?: any } = {
-    cluster: ['cluster:admin/ingest/pipeline/put'],
-  };
-
-  if (indexName !== undefined) {
-    priv.index = [
-      {
-        names: [indexName],
-        privileges: ['indices:data/write/bulk', 'indices:data/write/index', 'indices:admin/create'],
-      },
-    ];
-  }
-
-  const resp = await ml.hasPrivileges(priv);
-  return resp.securityDisabled === true || resp.has_all_requested === true;
-}
@@ -9,3 +9,4 @@ export { Actions } from './actions';
export { AuthorizationService, AuthorizationServiceSetup } from './authorization_service';
export { CheckSavedObjectsPrivileges } from './check_saved_objects_privileges';
export { featurePrivilegeIterator } from './privileges';
+export { CheckPrivilegesPayload } from './types';
@@ -26,6 +26,7 @@ export type {
  InvalidateAPIKeyResult,
  GrantAPIKeyResult,
} from './authentication';
+export type { CheckPrivilegesPayload } from './authorization';
export {
  LegacyAuditLogger,
  AuditLogger,
@@ -0,0 +1,146 @@ (new file)
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';
import { FtrProviderContext } from '../../ftr_provider_context';

export default ({ getService }: FtrProviderContext) => {
  const supertestWithoutAuth = getService('supertestWithoutAuth');
  const security = getService('security');

  const IMPORTER_ROLE_NAME = 'importer';
  const IMPORTER_USER_NAME = 'importer';
  const IMPORT_USER_PASSWORD = `${IMPORTER_USER_NAME}-password`;
  const INDEX_NAME = 'myNewIndex';

  describe('GET /internal/file_upload/has_import_permission', () => {
    it('should return true when user has all permissions', async () => {
      try {
        await security.role.create(IMPORTER_ROLE_NAME, {
          elasticsearch: {
            cluster: ['manage_pipeline'],
            indices: [
              {
                names: [INDEX_NAME],
                privileges: ['create', 'create_index'],
              },
            ],
          },
          kibana: [
            {
              feature: {
                indexPatterns: ['all'],
              },
              spaces: ['*'],
            },
          ],
        });

        await security.user.create(IMPORTER_USER_NAME, {
          password: IMPORT_USER_PASSWORD,
          roles: [IMPORTER_ROLE_NAME],
        });

        const resp = await supertestWithoutAuth
          .get(
            `/internal/file_upload/has_import_permission\
?checkCreateIndexPattern=true\
&checkHasManagePipeline=true\
&indexName=${INDEX_NAME}`
          )
          .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
          .set('kbn-xsrf', 'kibana')
          .expect(200);

        expect(resp.body.hasImportPermission).to.be(true);
      } finally {
        await security.role.delete(IMPORTER_ROLE_NAME);
        await security.user.delete(IMPORTER_USER_NAME);
      }
    });

    it('should return false when user can not create index pattern when checkCreateIndexPattern=true', async () => {
      try {
        await security.role.create(IMPORTER_ROLE_NAME, {});

        await security.user.create(IMPORTER_USER_NAME, {
          password: IMPORT_USER_PASSWORD,
          roles: [IMPORTER_ROLE_NAME],
        });

        const resp = await supertestWithoutAuth
          .get(
            `/internal/file_upload/has_import_permission\
?checkCreateIndexPattern=true\
&checkHasManagePipeline=false`
          )
          .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
          .set('kbn-xsrf', 'kibana')
          .send()
          .expect(200);

        expect(resp.body.hasImportPermission).to.be(false);
      } finally {
        await security.role.delete(IMPORTER_ROLE_NAME);
        await security.user.delete(IMPORTER_USER_NAME);
      }
    });

    it('should return false when user can not create pipeline when checkHasManagePipeline=true', async () => {
      try {
        await security.role.create(IMPORTER_ROLE_NAME, {});

        await security.user.create(IMPORTER_USER_NAME, {
          password: IMPORT_USER_PASSWORD,
          roles: [IMPORTER_ROLE_NAME],
        });

        const resp = await supertestWithoutAuth
          .get(
            `/internal/file_upload/has_import_permission\
?checkCreateIndexPattern=false\
&checkHasManagePipeline=true`
          )
          .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
          .set('kbn-xsrf', 'kibana')
          .expect(200);

        expect(resp.body.hasImportPermission).to.be(false);
      } finally {
        await security.role.delete(IMPORTER_ROLE_NAME);
        await security.user.delete(IMPORTER_USER_NAME);
      }
    });

    it('should return false when user does not have index permissions', async () => {
      try {
        await security.role.create(IMPORTER_ROLE_NAME, {});

        await security.user.create(IMPORTER_USER_NAME, {
          password: IMPORT_USER_PASSWORD,
          roles: [IMPORTER_ROLE_NAME],
        });

        const resp = await supertestWithoutAuth
          .get(
            `/internal/file_upload/has_import_permission\
?checkCreateIndexPattern=false\
&checkHasManagePipeline=false\
&indexName=${INDEX_NAME}`
          )
          .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
          .set('kbn-xsrf', 'kibana')
          .expect(200);

        expect(resp.body.hasImportPermission).to.be(false);
      } finally {
        await security.role.delete(IMPORTER_ROLE_NAME);
        await security.user.delete(IMPORTER_USER_NAME);
      }
    });
  });
};
x-pack/test/api_integration/apis/file_upload/index.ts (new file, 14 lines)

@@ -0,0 +1,14 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { FtrProviderContext } from '../../ftr_provider_context';

export default function ({ loadTestFile }: FtrProviderContext) {
  describe('File upload', function () {
    loadTestFile(require.resolve('./has_import_permission'));
  });
}
@@ -36,5 +36,6 @@ export default function ({ loadTestFile }: FtrProviderContext) {
    loadTestFile(require.resolve('./upgrade_assistant'));
    loadTestFile(require.resolve('./searchprofiler'));
    loadTestFile(require.resolve('./painless_lab'));
+    loadTestFile(require.resolve('./file_upload'));
  });
}
@@ -17,7 +17,11 @@ export default function ({ getPageObjects, getService }) {

  describe('GeoJSON import layer panel', () => {
    before(async () => {
-      await security.testUser.setRoles(['global_maps_all', 'geoall_data_writer']);
+      await security.testUser.setRoles([
+        'global_maps_all',
+        'geoall_data_writer',
+        'global_index_pattern_management_all',
+      ]);
      await PageObjects.maps.openNewMap();
    });
