[File upload] Add file upload x-pack plugin #32945

Closed
wants to merge 24 commits
Commits (24)
59c682d
Add file upload x-pack plugin
kindsun Mar 11, 2019
9abbc67
Clean up
kindsun Mar 11, 2019
13c187a
Remove unneeded cluster config
kindsun Mar 12, 2019
bd921a7
Remove unneeded test
kindsun Mar 12, 2019
4445658
First pass basic telemetry (not connected).
kindsun Mar 12, 2019
be3ab95
Basic telemetry connected
kindsun Mar 13, 2019
85c74bc
Review feedback
kindsun Mar 14, 2019
a36e7e0
Merge remote-tracking branch 'upstream/master' into plugin-file-upload
kindsun Mar 18, 2019
6015604
Revise telemetry to use savedObjectRepository. Capture metrics on app…
kindsun Mar 20, 2019
6b222f6
Lots of cleanup, consolidation of logic
kindsun Mar 22, 2019
dd577f3
Merge remote-tracking branch 'upstream/master' into plugin-file-upload
kindsun Mar 25, 2019
2aae63e
Clean up, reorg
kindsun Mar 26, 2019
8de9a97
Update telem tests and telem functions
kindsun Mar 26, 2019
63ec74e
Add back import data model
kindsun Mar 26, 2019
4b4f85a
Merge remote-tracking branch 'upstream/master' into plugin-file-upload
kindsun Apr 29, 2019
60a8a7c
Clean up and update telemetry tests
kindsun Apr 30, 2019
9ef9903
Merge remote-tracking branch 'upstream/master' into plugin-file-upload
kindsun Apr 30, 2019
056a892
Merge remote-tracking branch 'upstream/master' into plugin-file-upload
kindsun May 1, 2019
e264ec1
Fix telemetry test issues and update corresponding code
kindsun May 1, 2019
377a3a1
Up chunk limit to 30 MB
kindsun May 1, 2019
3e48977
Add file upload telemetry to saved objects management builder
kindsun May 1, 2019
23c33ff
Missing space
kindsun May 2, 2019
4361f88
Add descriptive comments to dynamic keys in telemetry fields
kindsun May 2, 2019
1fba1d0
Merge remote-tracking branch 'upstream/master' into plugin-file-upload
kindsun May 7, 2019
2 changes: 2 additions & 0 deletions x-pack/index.js
@@ -39,6 +39,7 @@ import { translations } from './plugins/translations';
import { upgradeAssistant } from './plugins/upgrade_assistant';
import { uptime } from './plugins/uptime';
import { ossTelemetry } from './plugins/oss_telemetry';
import { fileUpload } from './plugins/file_upload';
import { encryptedSavedObjects } from './plugins/encrypted_saved_objects';

module.exports = function (kibana) {
@@ -78,6 +79,7 @@ module.exports = function (kibana) {
upgradeAssistant(kibana),
uptime(kibana),
ossTelemetry(kibana),
fileUpload(kibana),
encryptedSavedObjects(kibana),
];
};
11 changes: 11 additions & 0 deletions x-pack/plugins/file_upload/common/constants/file_import.ts
@@ -0,0 +1,11 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

export const MAX_BYTES = 31457280;

// Value to use in the Elasticsearch index mapping metadata to identify the
// index as having been created by the File Upload Plugin.
export const INDEX_META_DATA_CREATED_BY = 'file-upload-plugin';
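
MAX_BYTES is 31457280 bytes, i.e. 30 MB (30 * 1024 * 1024), matching the "Up chunk limit to 30 MB" commit above. A caller would be expected to reject chunks above this limit before importing; a minimal sketch, where the helper and its import path are hypothetical and not part of this PR:

import { MAX_BYTES } from '../common/constants/file_import'; // path is illustrative

// Hypothetical guard: throws if a file chunk exceeds the 30 MB limit above.
function assertChunkWithinLimit(chunk) {
  const sizeInBytes = Buffer.byteLength(chunk);
  if (sizeInBytes > MAX_BYTES) {
    throw new Error(`Chunk is ${sizeInBytes} bytes, which exceeds the ${MAX_BYTES} byte limit`);
  }
}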
33 changes: 33 additions & 0 deletions x-pack/plugins/file_upload/index.js
@@ -0,0 +1,33 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { mirrorPluginStatus } from '../../server/lib/mirror_plugin_status';
import { fileUploadRoutes } from './server/routes/file_upload';
import { makeUsageCollector } from './server/telemetry/';
import mappings from './mappings';

export const fileUpload = kibana => {
return new kibana.Plugin({
require: ['elasticsearch', 'xpack_main'],
name: 'file_upload',
id: 'file_upload',
uiExports: {
mappings,
},
savedObjectSchemas: {
'file-upload-telemetry': {
isNamespaceAgnostic: true
}
},

init(server) {
const { xpack_main: xpackMainPlugin } = server.plugins;

mirrorPluginStatus(xpackMainPlugin, this);
fileUploadRoutes(server);
makeUsageCollector(server);
}
});
};
17 changes: 17 additions & 0 deletions x-pack/plugins/file_upload/mappings.json
@@ -0,0 +1,17 @@
{
"file-upload-telemetry": {
"properties": {
"filesUploadedTotalCount": {
"type": "long"
},
"filesUploadedTypesTotalCounts": {
"dynamic": "true",
"properties": {}
},
"filesUploadedByApp": {
"dynamic": "true",
"properties": {}
}
}
}
}
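
For reference, a telemetry saved object stored under this mapping could look like the sketch below; the two dynamic objects accept arbitrary keys, so the file types, app names, and counts shown are purely illustrative:

const exampleFileUploadTelemetry = {
  filesUploadedTotalCount: 3,
  // Dynamic properties: one counter per uploaded file type (keys are illustrative).
  filesUploadedTypesTotalCounts: { json: 2, csv: 1 },
  // Dynamic properties: one counter per originating app (keys are illustrative).
  filesUploadedByApp: { maps: 3 },
};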
@@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import { Server } from 'hapi';

export function callWithInternalUserFactory(server: Server): any;
@@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/



import { once } from 'lodash';

const _callWithInternalUser = once((server) => {
const { callWithInternalUser } = server.plugins.elasticsearch.getCluster('admin');
return callWithInternalUser;
});

export const callWithInternalUserFactory = (server) => {
return (...args) => {
return _callWithInternalUser(server)(...args);
};
};
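
A usage sketch for this factory, e.g. from server-side telemetry code; the endpoint and index shown are illustrative and not part of this PR:

import { callWithInternalUserFactory } from './call_with_internal_user_factory';

async function countKibanaDocs(server) {
  // The factory memoizes the 'admin' cluster lookup (lodash `once`) and
  // forwards all arguments to its callWithInternalUser method.
  const callWithInternalUser = callWithInternalUserFactory(server);
  return await callWithInternalUser('count', {
    index: '.kibana', // illustrative index
  });
}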
@@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import { callWithInternalUserFactory } from './call_with_internal_user_factory';

describe('call_with_internal_user_factory', () => {
describe('callWithInternalUserFactory', () => {
let server: any;
let callWithInternalUser: any;

beforeEach(() => {
callWithInternalUser = jest.fn();
server = {
plugins: {
elasticsearch: {
getCluster: jest.fn(() => ({ callWithInternalUser })),
},
},
};
});

it('should use internal user "admin"', () => {
const callWithInternalUserInstance = callWithInternalUserFactory(server);
callWithInternalUserInstance();

expect(server.plugins.elasticsearch.getCluster).toHaveBeenCalledWith('admin');
});
});
});
@@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/



import { once } from 'lodash';

const callWithRequest = once((server) => {
const cluster = server.plugins.elasticsearch.getCluster('data');
return cluster.callWithRequest;
});

export const callWithRequestFactory = (server, request) => {
return (...args) => {
return callWithRequest(server)(request, ...args);
};
};
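
The request-scoped counterpart follows the same pattern against the 'data' cluster; a sketch, where the handler and query are illustrative rather than the actual route added by this PR:

import { callWithRequestFactory } from './call_with_request_factory';

const exampleHandler = async (request) => {
  // Bind the 'data' cluster to the current request so calls run with the
  // requesting user's credentials rather than the internal user.
  const callWithRequest = callWithRequestFactory(request.server, request);
  return await callWithRequest('search', {
    index: 'uploaded-data', // illustrative index name
    body: { query: { match_all: {} } },
  });
};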
13 changes: 13 additions & 0 deletions x-pack/plugins/file_upload/server/client/errors.js
@@ -0,0 +1,13 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/



import { boomify } from 'boom';

export function wrapError(error) {
return boomify(error, { statusCode: error.status });
}
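
A sketch of the intended use in route handlers, with the work being wrapped passed in purely for illustration:

import { wrapError } from './errors';

async function runAndWrapErrors(doWork) {
  try {
    return await doWork();
  } catch (error) {
    // boomify turns the error into a Boom HTTP error, using error.status
    // (when present) as the response status code.
    return wrapError(error);
  }
}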
167 changes: 167 additions & 0 deletions x-pack/plugins/file_upload/server/models/import_data/import_data.js
@@ -0,0 +1,167 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import { INDEX_META_DATA_CREATED_BY } from '../../../common/constants/file_import';
import uuid from 'uuid';

export function importDataProvider(callWithRequest) {
async function importData(id, index, settings, mappings, ingestPipeline, data) {
let createdIndex;
let createdPipelineId;
const docCount = data.length;

try {

const {
id: pipelineId,
pipeline,
} = ingestPipeline;

if (id === undefined) {
// first chunk of data, create the index and id to return
id = uuid.v1();

await createIndex(index, settings, mappings);
createdIndex = index;

// create the pipeline if one has been supplied
if (pipelineId !== undefined) {
const success = await createPipeline(pipelineId, pipeline);
if (success.acknowledged !== true) {
throw success;
}
}
createdPipelineId = pipelineId;

} else {
createdIndex = index;
createdPipelineId = pipelineId;
}

let failures = [];
if (data.length) {
const resp = await indexData(index, createdPipelineId, data);
if (resp.success === false) {
if (resp.ingestError) {
// all docs failed, abort
throw resp;
} else {
// some docs failed.
// still report success but with a list of failures
failures = (resp.failures || []);
}
}
}

return {
success: true,
id,
index: createdIndex,
pipelineId: createdPipelineId,
docCount,
failures,
};
} catch (error) {
return {
success: false,
id,
index: createdIndex,
pipelineId: createdPipelineId,
error: (error.error !== undefined) ? error.error : error,
docCount,
ingestError: error.ingestError,
failures: (error.failures || [])
};
}
}

async function createIndex(index, settings, mappings) {
const body = {
mappings: {
_meta: {
created_by: INDEX_META_DATA_CREATED_BY
},
properties: mappings
}
};

if (settings && Object.keys(settings).length) {
body.settings = settings;
}

await callWithRequest('indices.create', { index, body });
}

async function indexData(index, pipelineId, data) {
try {
const body = [];
for (let i = 0; i < data.length; i++) {
body.push({ index: {} });
body.push(data[i]);
}

const settings = { index, body };
if (pipelineId !== undefined) {
settings.pipeline = pipelineId;
}

const resp = await callWithRequest('bulk', settings);
if (resp.errors) {
throw resp;
} else {
return {
success: true,
docs: data.length,
failures: [],
};
}
} catch (error) {

let failures = [];
let ingestError = false;
if (error.errors !== undefined && Array.isArray(error.items)) {
// an expected error where some or all of the bulk request
// docs have failed to be ingested.
failures = getFailures(error.items, data);
} else {
// some other error has happened.
ingestError = true;
}

return {
success: false,
error,
docCount: data.length,
failures,
ingestError,
};
}

}

async function createPipeline(id, pipeline) {
return await callWithRequest('ingest.putPipeline', { id, body: pipeline });
}

function getFailures(items, data) {
const failures = [];
for (let i = 0; i < items.length; i++) {
const item = items[i];
if (item.index && item.index.error) {
failures.push({
item: i,
reason: item.index.error.reason,
doc: data[i],
});
}
}
return failures;
}

return {
importData,
};
}
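
A sketch of the expected first-chunk flow, assuming the request-scoped cluster factory above lives under server/client/ next to errors.js; the index name, mappings, and documents are illustrative:

import { callWithRequestFactory } from '../../client/call_with_request_factory'; // path is an assumption
import { importDataProvider } from './import_data';

async function importFirstChunk(server, request) {
  const callWithRequest = callWithRequestFactory(server, request);
  const { importData } = importDataProvider(callWithRequest);

  // Passing id === undefined marks this as the first chunk: importData creates
  // the index (and an ingest pipeline, if one is supplied) and returns a
  // generated id to be reused for subsequent chunks.
  return await importData(
    undefined,                             // id
    'uploaded-data',                       // index (illustrative)
    {},                                    // settings
    { name: { type: 'keyword' } },         // mappings (illustrative)
    {},                                    // ingestPipeline: no pipeline id, so none is created
    [{ name: 'doc-1' }, { name: 'doc-2' }] // data
  );
}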
8 changes: 8 additions & 0 deletions x-pack/plugins/file_upload/server/models/import_data/index.js
@@ -0,0 +1,8 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/


export { importDataProvider } from './import_data';