diff --git a/x-pack/plugins/file_upload/common/constants.ts b/x-pack/plugins/file_upload/common/constants.ts index 5744429c80d4db..11ad80f5c955ee 100644 --- a/x-pack/plugins/file_upload/common/constants.ts +++ b/x-pack/plugins/file_upload/common/constants.ts @@ -5,6 +5,7 @@ * 2.0. */ +export const MB = Math.pow(2, 20); export const MAX_FILE_SIZE = '100MB'; export const MAX_FILE_SIZE_BYTES = 104857600; // 100MB diff --git a/x-pack/plugins/file_upload/common/types.ts b/x-pack/plugins/file_upload/common/types.ts index eac001fc02f15d..c01e514f0f720e 100644 --- a/x-pack/plugins/file_upload/common/types.ts +++ b/x-pack/plugins/file_upload/common/types.ts @@ -26,7 +26,7 @@ export interface Doc { message: string; } -export type ImportDoc = Doc | string; +export type ImportDoc = Doc | string | object; export interface Settings { pipeline?: string; diff --git a/x-pack/plugins/file_upload/public/api/index.ts b/x-pack/plugins/file_upload/public/api/index.ts new file mode 100644 index 00000000000000..359bc4a1687b53 --- /dev/null +++ b/x-pack/plugins/file_upload/public/api/index.ts @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; +import { FileUploadComponentProps, lazyLoadFileUploadModules } from '../lazy_load_bundle'; +import type { IImporter, ImportFactoryOptions } from '../importer'; + +export interface FileUploadStartApi { + getFileUploadComponent(): Promise<React.ComponentType<FileUploadComponentProps>>; + importerFactory(format: string, options: ImportFactoryOptions): Promise<IImporter | undefined>; +} + +export async function getFileUploadComponent(): Promise< + React.ComponentType<FileUploadComponentProps> +> { + const fileUploadModules = await lazyLoadFileUploadModules(); + return fileUploadModules.JsonUploadAndParse; +} + +export async function importerFactory( + format: string, + options: ImportFactoryOptions +): Promise<IImporter | undefined> { + const fileUploadModules = await lazyLoadFileUploadModules(); + return fileUploadModules.importerFactory(format, options); +} diff --git a/x-pack/plugins/file_upload/public/components/json_import_progress.js b/x-pack/plugins/file_upload/public/components/json_import_progress.js index 1f9293e77d33c7..1adf7d9039e56b 100644 --- a/x-pack/plugins/file_upload/public/components/json_import_progress.js +++ b/x-pack/plugins/file_upload/public/components/json_import_progress.js @@ -9,7 +9,7 @@ import React, { Fragment, Component } from 'react'; import { i18n } from '@kbn/i18n'; import { EuiCodeBlock, EuiSpacer, EuiText, EuiTitle, EuiProgress, EuiCallOut } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; -import { basePath } from '../kibana_services'; +import { getHttp } from '../kibana_services'; export class JsonImportProgress extends Component { state = { @@ -118,7 +118,7 @@ export class JsonImportProgress extends Component { <EuiLink - href={`${basePath}/app/management/data/index_management/indices`} + href={getHttp().basePath.prepend('/app/management/data/index_management/indices')} target="_blank" > {i18n.translate('xpack.fileUpload.jsonImport.indexMgmtLink', { defaultMessage: 'Index Management',
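(Review note, illustrative only — not part of the diff. The new `api/index.ts` above is what consumers see through the plugin's start contract. A minimal sketch of how a dependent plugin might use it; the `'ndjson'` format string comes from `importer_factory.ts` later in this PR, while the empty `importConfig` placeholder and the import path are assumptions of this example, not values the PR prescribes:)

```ts
import type { FileUploadPluginStart, ImportFactoryOptions } from '../file_upload/public';

// Hypothetical consumer: `fileUpload` is the start contract returned by
// FileUploadPlugin.start() (see plugin.ts near the end of this diff).
export async function loadUploadPieces(fileUpload: FileUploadPluginStart) {
  // Both calls resolve through lazyLoadFileUploadModules(), so the heavy
  // upload bundle is downloaded once, on first use.
  const JsonUploadAndParse = await fileUpload.getFileUploadComponent();

  const options = ({ importConfig: {} } as unknown) as ImportFactoryOptions; // placeholder
  const importer = await fileUpload.importerFactory('ndjson', options);
  if (!importer) {
    throw new Error('unsupported format');
  }
  return { JsonUploadAndParse, importer };
}
```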
diff --git a/x-pack/plugins/file_upload/public/components/json_index_file_picker.js b/x-pack/plugins/file_upload/public/components/json_index_file_picker.js index a92412ae9d697c..78bf7378578de8 --- a/x-pack/plugins/file_upload/public/components/json_index_file_picker.js +++ b/x-pack/plugins/file_upload/public/components/json_index_file_picker.js @@ -9,7 +9,6 @@ import React, { Fragment, Component } from 'react'; import { EuiFilePicker, EuiFormRow, EuiProgress } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; import { i18n } from '@kbn/i18n'; -import { parseFile } from '../util/file_parser'; const MAX_FILE_SIZE = 52428800; const ACCEPTABLE_FILETYPES = ['json', 'geojson']; @@ -33,7 +32,7 @@ export class JsonIndexFilePicker extends Component { this._isMounted = false; } - getFileParseActive = () => this._isMounted && this.state.fileParseActive; + isFileParseActive = () => this._isMounted && this.state.fileParseActive; _fileHandler = (fileList) => { const fileArr = Array.from(fileList); @@ -61,36 +60,6 @@ export class JsonIndexFilePicker extends Component { ); }; - _checkFileSize = ({ size }) => { - const fileSizeValid = true; - try { - if (size > MAX_FILE_SIZE) { - const humanReadableSize = bytesToSize(size); - const humanReadableMaxSize = bytesToSize(MAX_FILE_SIZE); - throw new Error( - i18n.translate('xpack.fileUpload.jsonIndexFilePicker.acceptableFileSize', { - defaultMessage: 'File size {fileSize} exceeds max file size of {maxFileSize}', - values: { - fileSize: humanReadableSize, - maxFileSize: humanReadableMaxSize, - }, - }) - ); - } - } catch (error) { - this.setState({ - fileUploadError: i18n.translate('xpack.fileUpload.jsonIndexFilePicker.fileSizeError', { - defaultMessage: 'File size error: {errorMessage}', - values: { - errorMessage: error.message, - }, - }), - }); - return; - } - return fileSizeValid; - }; - _getFileNameAndCheckType({ name }) { let fileNameOnly; try { @@ -136,54 +105,58 @@ setFileProgress = ({ featuresProcessed, bytesProcessed, totalBytes }) => { const percentageProcessed = parseInt((100 * bytesProcessed) / totalBytes); - if (this.getFileParseActive()) { + if (this.isFileParseActive()) { this.setState({ featuresProcessed, percentageProcessed }); } }; async _parseFile(file) { const { currentFileTracker } = this.state; - const { - setFileRef, - setParsedFile, - resetFileAndIndexSettings, - onFileUpload, - transformDetails, - setIndexName, - } = this.props; + const { setFileRef, setParsedFile, resetFileAndIndexSettings } = this.props; + + if (file.size > MAX_FILE_SIZE) { + this.setState({ + fileUploadError: i18n.translate('xpack.fileUpload.jsonIndexFilePicker.acceptableFileSize', { + defaultMessage: 'File size {fileSize} exceeds maximum file size of {maxFileSize}', + values: { + fileSize: bytesToSize(file.size), + maxFileSize: bytesToSize(MAX_FILE_SIZE), + }, + }), + }); + resetFileAndIndexSettings(); + return; + } - const fileSizeValid = this._checkFileSize(file); const defaultIndexName = this._getFileNameAndCheckType(file); - if (!fileSizeValid || !defaultIndexName) { + if (!defaultIndexName) { resetFileAndIndexSettings(); return; } - // Parse file - const fileResult = await parseFile({ - file, - transformDetails, - onFileUpload, - setFileProgress: this.setFileProgress, - getFileParseActive: this.getFileParseActive, - }).catch((err) => { - if (this._isMounted) { - this.setState({ - fileParseActive: false, - percentageProcessed: 0, - featuresProcessed: 0, - fileUploadError: ( - <FormattedMessage - id="xpack.fileUpload.jsonIndexFilePicker.unableParseFile" - defaultMessage="Unable to parse file: {error}" - values={{ error: err.message }} - /> - ), - }); - } - }); + const fileResult = await this.props.geojsonImporter + .readFile(file, this.setFileProgress, this.isFileParseActive) + .catch((err) => { + if (this._isMounted) { + this.setState({ + fileParseActive: false, + percentageProcessed: 0, + featuresProcessed: 0, + fileUploadError: ( + <FormattedMessage + id="xpack.fileUpload.jsonIndexFilePicker.unableParseFile" + defaultMessage="Unable to parse file: {error}" + values={{ error: err.message }} + /> + ), + }); + resetFileAndIndexSettings(); + return; + } + }); + if (!this._isMounted) { return; } @@ -198,25 +171,20 @@
resetFileAndIndexSettings(); return; } - const { errors, parsedGeojson } = fileResult; - if (errors.length) { - // Set only the first error for now (since there's only one). - // TODO: Add handling in case of further errors - const error = errors[0]; + if (fileResult.errors.length) { this.setState({ fileUploadError: ( ), }); } - setIndexName(defaultIndexName); setFileRef(file); - setParsedFile(parsedGeojson); + setParsedFile(fileResult, defaultIndexName); } render() { diff --git a/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js b/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js index 6a9d7ce74fe842..d4f6858eb59950 100644 --- a/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js +++ b/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js @@ -9,12 +9,13 @@ import React, { Component, Fragment } from 'react'; import { i18n } from '@kbn/i18n'; import { EuiForm } from '@elastic/eui'; import PropTypes from 'prop-types'; -import { indexData, createIndexPattern } from '../util/indexing_service'; -import { getGeoIndexTypesForFeatures } from '../util/geo_processing'; import { IndexSettings } from './index_settings'; import { JsonIndexFilePicker } from './json_index_file_picker'; import { JsonImportProgress } from './json_import_progress'; import _ from 'lodash'; +import { GeoJsonImporter } from '../importer/geojson_importer'; +import { ES_FIELD_TYPES } from '../../../../../src/plugins/data/public'; +import { getIndexPatternService } from '../kibana_services'; const INDEXING_STAGE = { INDEXING_STARTED: i18n.translate('xpack.fileUpload.jsonUploadAndParse.dataIndexingStarted', { @@ -43,6 +44,8 @@ const INDEXING_STAGE = { }; export class JsonUploadAndParse extends Component { + geojsonImporter = new GeoJsonImporter(); + state = { // File state fileRef: null, @@ -87,11 +90,8 @@ export class JsonUploadAndParse extends Component { }); }; - componentDidUpdate(prevProps, prevState) { - if (!_.isEqual(prevState.parsedFile, this.state.parsedFile)) { - this._setIndexTypes({ ...this.state, ...this.props }); - } - this._setSelectedType(this.state); + componentDidUpdate() { + this._updateIndexType(); this._setIndexReady({ ...this.state, ...this.props }); this._indexData({ ...this.state, ...this.props }); if (this.props.isIndexingTriggered && !this.state.showImportProgress && this._isMounted) { @@ -99,11 +99,30 @@ export class JsonUploadAndParse extends Component { } } - _setSelectedType = ({ selectedIndexType, indexTypes }) => { - if (!selectedIndexType && indexTypes.length) { - this.setState({ selectedIndexType: indexTypes[0] }); + _updateIndexType() { + let nextIndexTypes = []; + if (this.state.parsedFile) { + nextIndexTypes = + this.state.parsedFile.geometryTypes.includes('Point') || + this.state.parsedFile.geometryTypes.includes('MultiPoint') + ? 
[ES_FIELD_TYPES.GEO_POINT, ES_FIELD_TYPES.GEO_SHAPE] + : [ES_FIELD_TYPES.GEO_SHAPE]; } - }; + if (!_.isEqual(nextIndexTypes, this.state.indexTypes)) { + this.setState({ indexTypes: nextIndexTypes }); + } + + if (!this.state.selectedIndexType && nextIndexTypes.length) { + // auto select index type + this.setState({ selectedIndexType: nextIndexTypes[0] }); + } else if ( + this.state.selectedIndexType && + !nextIndexTypes.includes(this.state.selectedIndexType) + ) { + // unselect indexType if selected type is no longer an option + this.setState({ selectedIndexType: null }); + } + } _setIndexReady = ({ parsedFile, @@ -131,14 +150,12 @@ indexedFile, parsedFile, indexRequestInFlight, - transformDetails, indexName, - appName, selectedIndexType, isIndexingTriggered, isIndexReady, onIndexingComplete, - boolCreateIndexPattern, + onIndexingError, }) => { // Check index ready const filesAreEqual = _.isEqual(indexedFile, parsedFile); @@ -150,108 +167,108 @@ currentIndexingStage: INDEXING_STAGE.WRITING_TO_INDEX, }); - // Index data - const indexDataResp = await indexData( - parsedFile, - transformDetails, + this.geojsonImporter.setDocs(parsedFile.parsedGeojson, selectedIndexType); + + // initialize import + const settings = { + number_of_shards: 1, + }; + const mappings = { + properties: { + coordinates: { + type: this.state.selectedIndexType, + }, + }, + }; + const ingestPipeline = {}; + const initializeImportResp = await this.geojsonImporter.initializeImport( indexName, - selectedIndexType, - appName + settings, + mappings, + ingestPipeline ); - if (!this._isMounted) { return; } - - // Index error - if (!indexDataResp.success) { + if (initializeImportResp.index === undefined || initializeImportResp.id === undefined) { this.setState({ - indexedFile: null, - indexDataResp, indexRequestInFlight: false, currentIndexingStage: INDEXING_STAGE.INDEXING_ERROR, }); this._resetFileAndIndexSettings(); - if (onIndexingComplete) { - onIndexingComplete({ indexDataResp }); - } + onIndexingError(); return; } - // Index data success. Update state & create index pattern - this.setState({ - indexDataResp, - indexedFile: parsedFile, - currentIndexingStage: INDEXING_STAGE.INDEXING_COMPLETE, - }); - let indexPatternResp; - if (boolCreateIndexPattern) { - indexPatternResp = await this._createIndexPattern(this.state); + // import file + const importResp = await this.geojsonImporter.import( + initializeImportResp.id, + indexName, + initializeImportResp.pipelineId, + () => {} + ); + if (!this._isMounted) { + return; } - - // Indexing complete, update state & callback (if any) - if (!this._isMounted || !indexPatternResp) { + if (!importResp.success) { + this.setState({ + indexDataResp: importResp, + indexRequestInFlight: false, + currentIndexingStage: INDEXING_STAGE.INDEXING_ERROR, + }); + this._resetFileAndIndexSettings(); + onIndexingError(); return; } this.setState({ - currentIndexingStage: INDEXING_STAGE.INDEX_PATTERN_COMPLETE, + indexDataResp: importResp, + indexedFile: parsedFile, + currentIndexingStage: INDEXING_STAGE.INDEXING_COMPLETE, }); - if (onIndexingComplete) { - onIndexingComplete({ - indexDataResp, - ...(boolCreateIndexPattern ?
{ indexPatternResp } : {}), - }); - } - }; - _createIndexPattern = async ({ indexName }) => { - if (!this._isMounted) { - return; - } + // create index pattern this.setState({ indexPatternRequestInFlight: true, currentIndexingStage: INDEXING_STAGE.CREATING_INDEX_PATTERN, }); - const indexPatternResp = await createIndexPattern(indexName); - + let indexPattern; + try { + indexPattern = await getIndexPatternService().createAndSave( + { + title: indexName, + }, + true + ); + } catch (error) { + if (this._isMounted) { + this.setState({ + indexPatternRequestInFlight: false, + currentIndexingStage: INDEXING_STAGE.INDEX_PATTERN_ERROR, + }); + this._resetFileAndIndexSettings(); + onIndexingError(); + } + return; + } if (!this._isMounted) { return; } this.setState({ - indexPatternResp, + indexPatternResp: { + success: true, + id: indexPattern.id, + fields: indexPattern.fields, + }, indexPatternRequestInFlight: false, }); + this.setState({ + currentIndexingStage: INDEXING_STAGE.INDEX_PATTERN_COMPLETE, + }); this._resetFileAndIndexSettings(); - - return indexPatternResp; - }; - - // This is mostly for geo. Some data have multiple valid index types that can - // be chosen from, such as 'geo_point' vs. 'geo_shape' for point data - _setIndexTypes = ({ transformDetails, parsedFile }) => { - if (parsedFile) { - // User-provided index types - if (typeof transformDetails === 'object') { - this.setState({ indexTypes: transformDetails.indexTypes }); - } else { - // Included index types - switch (transformDetails) { - case 'geo': - const featureTypes = _.uniq( - parsedFile.features - ? parsedFile.features.map(({ geometry }) => geometry.type) - : [parsedFile.geometry.type] - ); - this.setState({ - indexTypes: getGeoIndexTypesForFeatures(featureTypes), - }); - break; - default: - this.setState({ indexTypes: [] }); - return; - } - } - } + onIndexingComplete({ + indexDataResp: importResp, + indexPattern, + }); }; render() { @@ -264,7 +281,6 @@ export class JsonUploadAndParse extends Component { indexTypes, showImportProgress, } = this.state; - const { onFileUpload, transformDetails } = this.props; return ( @@ -282,15 +298,14 @@ export class JsonUploadAndParse extends Component { ) : ( this.setState({ indexName }), - setFileRef: (fileRef) => this.setState({ fileRef }), - setParsedFile: (parsedFile) => this.setState({ parsedFile }), - transformDetails, - resetFileAndIndexSettings: this._resetFileAndIndexSettings, + fileRef={fileRef} + setFileRef={(fileRef) => this.setState({ fileRef })} + setParsedFile={(parsedFile, indexName) => { + this.setState({ parsedFile, indexName }); + this.props.onFileUpload(parsedFile.parsedGeojson, indexName); }} + resetFileAndIndexSettings={this._resetFileAndIndexSettings} + geojsonImporter={this.geojsonImporter} /> void; - onFileRemove: () => void; - onIndexReady: (indexReady: boolean) => void; - transformDetails: string; - onIndexingComplete: (indexResponses: { - indexDataResp: unknown; - indexPatternResp: unknown; - }) => void; -} - -let lazyLoadPromise: Promise>; - -export async function getFileUploadComponent(): Promise< - React.ComponentType -> { - if (typeof lazyLoadPromise !== 'undefined') { - return lazyLoadPromise; - } - - lazyLoadPromise = new Promise(async (resolve) => { - // @ts-expect-error - const { JsonUploadAndParse } = await import('./components/json_upload_and_parse'); - resolve(JsonUploadAndParse); - }); - return lazyLoadPromise; -} diff --git a/x-pack/plugins/file_upload/public/util/geo_json_clean_and_validate.js 
b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_clean_and_validate.js similarity index 100% rename from x-pack/plugins/file_upload/public/util/geo_json_clean_and_validate.js rename to x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_clean_and_validate.js diff --git a/x-pack/plugins/file_upload/public/util/geo_json_clean_and_validate.test.js b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_clean_and_validate.test.js similarity index 97% rename from x-pack/plugins/file_upload/public/util/geo_json_clean_and_validate.test.js rename to x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_clean_and_validate.test.js index 7203e50674c384..0f8d126251dfba 100644 --- a/x-pack/plugins/file_upload/public/util/geo_json_clean_and_validate.test.js +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_clean_and_validate.test.js @@ -5,7 +5,7 @@ * 2.0. */ -import { cleanGeometry, geoJsonCleanAndValidate } from './geo_json_clean_and_validate'; +import { cleanGeometry, geoJsonCleanAndValidate } from './geojson_clean_and_validate'; import * as jsts from 'jsts'; describe('geo_json_clean_and_validate', () => { diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js new file mode 100644 index 00000000000000..e348686dc060ae --- /dev/null +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { GeoJsonImporter } from './geojson_importer'; +import { ES_FIELD_TYPES } from '../../../../../../src/plugins/data/public'; +import '@loaders.gl/polyfills'; + +const FEATURE_COLLECTION = { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { + population: 200, + }, + geometry: { + type: 'Point', + coordinates: [-112.0372, 46.608058], + }, + }, + ], +}; + +describe('readFile', () => { + const setFileProgress = jest.fn((a) => a); + + const FILE_WITH_FEATURE_COLLECTION = new File( + [JSON.stringify(FEATURE_COLLECTION)], + 'testfile.json', + { type: 'text/json' } + ); + + beforeEach(() => { + jest.resetAllMocks(); + jest.restoreAllMocks(); + }); + + test('should throw error if no file provided', async () => { + const importer = new GeoJsonImporter(); + await importer + .readFile(null, setFileProgress, () => { + return true; + }) + .catch((e) => { + expect(e.message).toMatch('Error, no file provided'); + }); + }); + + test('should abort if file parse is cancelled', async () => { + const importer = new GeoJsonImporter(); + + const results = await importer.readFile(FILE_WITH_FEATURE_COLLECTION, setFileProgress, () => { + return false; + }); + + expect(results).toBeNull(); + }); + + test('should read features from feature collection', async () => { + const importer = new GeoJsonImporter(); + const results = await importer.readFile(FILE_WITH_FEATURE_COLLECTION, setFileProgress, () => { + return true; + }); + + expect(setFileProgress).toHaveBeenCalled(); + expect(results).toEqual({ + errors: [], + geometryTypes: ['Point'], + parsedGeojson: FEATURE_COLLECTION, + }); + }); + + test('should remove features without geometry', async () => { + const fileWithFeaturesWithoutGeometry = new File( + [ + JSON.stringify({ + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { + population: 200, + }, + geometry: { + type: 'Point', + coordinates: [-112.0372, 46.608058], + }, + }, + {}, + { geometry: {} }, + ], + }), + ], + 'testfile.json', + { type: 'text/json' } + ); + + const importer = new GeoJsonImporter(); + const results = await importer.readFile( + fileWithFeaturesWithoutGeometry, + setFileProgress, + () => { + return true; + } + ); + + expect(setFileProgress).toHaveBeenCalled(); + expect(results).toEqual({ + errors: ['2 features without geometry omitted'], + geometryTypes: ['Point'], + parsedGeojson: FEATURE_COLLECTION, + }); + }); + + test('should read unwrapped feature', async () => { + const fileWithUnwrapedFeature = new File( + [ + JSON.stringify({ + type: 'Feature', + properties: { + population: 200, + }, + geometry: { + type: 'Point', + coordinates: [-112.0372, 46.608058], + }, + }), + ], + 'testfile.json', + { type: 'text/json' } + ); + + const importer = new GeoJsonImporter(); + const results = await importer.readFile(fileWithUnwrapedFeature, setFileProgress, () => { + return true; + }); + + expect(setFileProgress).toHaveBeenCalled(); + expect(results).toEqual({ + errors: [], + geometryTypes: ['Point'], + parsedGeojson: FEATURE_COLLECTION, + }); + }); + + test('should throw if no features', async () => { + const fileWithNoFeatures = new File( + [ + JSON.stringify({ + type: 'FeatureCollection', + features: [], + }), + ], + 'testfile.json', + { type: 'text/json' } + ); + + const importer = new GeoJsonImporter(); + await importer + .readFile(fileWithNoFeatures, setFileProgress, () => { + return true; + }) + .catch((e) => { + expect(e.message).toMatch('Error, no features detected'); + }); + }); + + test('should throw if no features 
with geometry', async () => { + const fileWithFeaturesWithNoGeometry = new File( + [ + JSON.stringify({ + type: 'FeatureCollection', + features: [ + {}, + { + geometry: {}, + }, + ], + }), + ], + 'testfile.json', + { type: 'text/json' } + ); + + const importer = new GeoJsonImporter(); + await importer + .readFile(fileWithFeaturesWithNoGeometry, setFileProgress, () => { + return true; + }) + .catch((e) => { + expect(e.message).toMatch('Error, no features detected'); + }); + }); +}); + +describe('setDocs', () => { + test('should convert features to geo_point ES documents', () => { + const importer = new GeoJsonImporter(); + importer.setDocs(FEATURE_COLLECTION, ES_FIELD_TYPES.GEO_POINT); + expect(importer.getDocs()).toEqual([ + { + coordinates: [-112.0372, 46.608058], + population: 200, + }, + ]); + }); + + test('should convert features to geo_shape ES documents', () => { + const importer = new GeoJsonImporter(); + importer.setDocs(FEATURE_COLLECTION, ES_FIELD_TYPES.GEO_SHAPE); + expect(importer.getDocs()).toEqual([ + { + coordinates: { + type: 'point', + coordinates: [-112.0372, 46.608058], + }, + population: 200, + }, + ]); + }); +}); diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts new file mode 100644 index 00000000000000..189084e9180daa --- /dev/null +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts @@ -0,0 +1,178 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { + Feature, + FeatureCollection, + Point, + MultiPoint, + LineString, + MultiLineString, + Polygon, + MultiPolygon, +} from 'geojson'; +import { i18n } from '@kbn/i18n'; +// @ts-expect-error +import { JSONLoader, loadInBatches } from './loaders'; +import { CreateDocsResponse } from '../types'; +import { Importer } from '../importer'; +import { ES_FIELD_TYPES } from '../../../../../../src/plugins/data/public'; +// @ts-expect-error +import { geoJsonCleanAndValidate } from './geojson_clean_and_validate'; + +export class GeoJsonImporter extends Importer { + constructor() { + super(); + } + + public read(data: ArrayBuffer): { success: boolean } { + throw new Error('read(data: ArrayBuffer) not supported, use readFile instead.'); + } + + protected _createDocs(text: string): CreateDocsResponse { + throw new Error('_createDocs not implemented.'); + } + + public getDocs() { + return this._docArray; + } + + public setDocs( + featureCollection: FeatureCollection, + geoFieldType: ES_FIELD_TYPES.GEO_POINT | ES_FIELD_TYPES.GEO_SHAPE + ) { + this._docArray = []; + for (let i = 0; i < featureCollection.features.length; i++) { + const feature = featureCollection.features[i]; + const geometry = feature.geometry as + | Point + | MultiPoint + | LineString + | MultiLineString + | Polygon + | MultiPolygon; + const coordinates = + geoFieldType === ES_FIELD_TYPES.GEO_SHAPE + ? { + type: geometry.type.toLowerCase(), + coordinates: geometry.coordinates, + } + : geometry.coordinates; + const properties = feature.properties ? 
feature.properties : {}; + this._docArray.push({ + coordinates, + ...properties, + }); + } + } + + public async readFile( + file: File, + setFileProgress: ({ + featuresProcessed, + bytesProcessed, + totalBytes, + }: { + featuresProcessed: number; + bytesProcessed: number; + totalBytes: number; + }) => void, + isFileParseActive: () => boolean + ): Promise<{ + errors: string[]; + geometryTypes: string[]; + parsedGeojson: FeatureCollection; + } | null> { + if (!file) { + throw new Error( + i18n.translate('xpack.fileUpload.fileParser.noFileProvided', { + defaultMessage: 'Error, no file provided', + }) + ); + } + + return new Promise(async (resolve, reject) => { + const batches = await loadInBatches(file, JSONLoader, { + json: { + jsonpaths: ['$.features'], + _rootObjectBatches: true, + }, + }); + + const rawFeatures: unknown[] = []; + for await (const batch of batches) { + if (!isFileParseActive()) { + break; + } + + if (batch.batchType === 'root-object-batch-complete') { + // Handle single feature geoJson + if (rawFeatures.length === 0) { + rawFeatures.push(batch.container); + } + } else { + rawFeatures.push(...batch.data); + } + + setFileProgress({ + featuresProcessed: rawFeatures.length, + bytesProcessed: batch.bytesUsed, + totalBytes: file.size, + }); + } + + if (!isFileParseActive()) { + resolve(null); + return; + } + + if (rawFeatures.length === 0) { + reject( + new Error( + i18n.translate('xpack.fileUpload.fileParser.noFeaturesDetected', { + defaultMessage: 'Error, no features detected', + }) + ) + ); + return; + } + + const features: Feature[] = []; + const geometryTypesMap = new Map(); + let invalidCount = 0; + for (let i = 0; i < rawFeatures.length; i++) { + const rawFeature = rawFeatures[i] as Feature; + if (!rawFeature.geometry || !rawFeature.geometry.type) { + invalidCount++; + } else { + if (!geometryTypesMap.has(rawFeature.geometry.type)) { + geometryTypesMap.set(rawFeature.geometry.type, true); + } + features.push(geoJsonCleanAndValidate(rawFeature)); + } + } + + const errors: string[] = []; + if (invalidCount > 0) { + errors.push( + i18n.translate('xpack.fileUpload.fileParser.featuresOmitted', { + defaultMessage: '{invalidCount} features without geometry omitted', + values: { invalidCount }, + }) + ); + } + resolve({ + errors, + geometryTypes: Array.from(geometryTypesMap.keys()), + parsedGeojson: { + type: 'FeatureCollection', + features, + }, + }); + }); + } +} diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/index.ts b/x-pack/plugins/file_upload/public/importer/geojson_importer/index.ts new file mode 100644 index 00000000000000..9ffb84e603161b --- /dev/null +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/index.ts @@ -0,0 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +export { GeoJsonImporter } from './geojson_importer'; diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/loaders.js b/x-pack/plugins/file_upload/public/importer/geojson_importer/loaders.js new file mode 100644 index 00000000000000..eb6d69a4b57b7b --- /dev/null +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/loaders.js @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +// Loading @loaders.gl from a JavaScript file to avoid TypeScript compilation failures within @loaders.gl. +export { JSONLoader } from '@loaders.gl/json'; +export { loadInBatches } from '@loaders.gl/core'; diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer.ts b/x-pack/plugins/file_upload/public/importer/importer.ts similarity index 80% rename from x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer.ts rename to x-pack/plugins/file_upload/public/importer/importer.ts index 518d3808b2da2d..8bdb465bd69cf4 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer.ts +++ b/x-pack/plugins/file_upload/public/importer/importer.ts @@ -8,7 +8,7 @@ import { chunk } from 'lodash'; import moment from 'moment'; import { i18n } from '@kbn/i18n'; -import { ml } from '../../../../../services/ml_api_service'; +import { getHttp } from '../kibana_services'; import { ImportDoc, ImportFailure, @@ -16,49 +16,21 @@ import { Mappings, Settings, IngestPipeline, -} from '../../../../../../../../file_upload/common'; + MB, +} from '../../common'; +import { CreateDocsResponse, IImporter, ImportResults } from './types'; const CHUNK_SIZE = 5000; const MAX_CHUNK_CHAR_COUNT = 1000000; const IMPORT_RETRIES = 5; const STRING_CHUNKS_MB = 100; -export interface ImportConfig { - settings: Settings; - mappings: Mappings; - pipeline: IngestPipeline; -} - -export interface ImportResults { - success: boolean; - failures?: any[]; - docCount?: number; - error?: any; -} - -export interface CreateDocsResponse { - success: boolean; - remainder: number; - docs: ImportDoc[]; - error?: any; -} - -export abstract class Importer { - private _settings: Settings; - private _mappings: Mappings; - private _pipeline: IngestPipeline; - +export abstract class Importer implements IImporter { protected _docArray: ImportDoc[] = []; - constructor({ settings, mappings, pipeline }: ImportConfig) { - this._settings = settings; - this._mappings = mappings; - this._pipeline = pipeline; - } - public read(data: ArrayBuffer) { const decoder = new TextDecoder(); - const size = STRING_CHUNKS_MB * Math.pow(2, 20); + const size = STRING_CHUNKS_MB * MB; // chop the data up into 100MB chunks for processing.
// if the chop produces a partial line at the end, a character "remainder" count @@ -82,10 +54,12 @@ export abstract class Importer { protected abstract _createDocs(t: string): CreateDocsResponse; - public async initializeImport(index: string) { - const settings = this._settings; - const mappings = this._mappings; - const pipeline = this._pipeline; + public async initializeImport( + index: string, + settings: Settings, + mappings: Mappings, + pipeline: IngestPipeline + ) { updatePipelineTimezone(pipeline); // if no pipeline has been supplied, @@ -98,7 +72,7 @@ export abstract class Importer { } : {}; - const createIndexResp = await ml.fileDatavisualizer.import({ + return await callImportRoute({ id: undefined, index, data: [], @@ -106,8 +80,6 @@ export abstract class Importer { mappings, ingestPipeline, }); - - return createIndexResp; } public async import( @@ -119,12 +91,9 @@ export abstract class Importer { if (!id || !index) { return { success: false, - error: i18n.translate( - 'xpack.ml.fileDatavisualizer.importView.noIdOrIndexSuppliedErrorMessage', - { - defaultMessage: 'no ID or index supplied', - } - ), + error: i18n.translate('xpack.fileUpload.import.noIdOrIndexSuppliedErrorMessage', { + defaultMessage: 'no ID or index supplied', + }), }; } @@ -139,15 +108,6 @@ export abstract class Importer { let error; for (let i = 0; i < chunks.length; i++) { - const aggs = { - id, - index, - data: chunks[i], - settings: {}, - mappings: {}, - ingestPipeline, - }; - let retries = IMPORT_RETRIES; let resp: ImportResponse = { success: false, @@ -160,7 +120,14 @@ export abstract class Importer { while (resp.success === false && retries > 0) { try { - resp = await ml.fileDatavisualizer.import(aggs); + resp = await callImportRoute({ + id, + index, + data: chunks[i], + settings: {}, + mappings: {}, + ingestPipeline, + }); if (retries < IMPORT_RETRIES) { // eslint-disable-next-line no-console @@ -264,3 +231,38 @@ function createDocumentChunks(docArray: ImportDoc[]) { } return chunks; } + +function callImportRoute({ + id, + index, + data, + settings, + mappings, + ingestPipeline, +}: { + id: string | undefined; + index: string; + data: ImportDoc[]; + settings: Settings | unknown; + mappings: Mappings | unknown; + ingestPipeline: { + id?: string; + pipeline?: IngestPipeline; + }; +}) { + const query = id !== undefined ? 
{ id } : {}; + const body = JSON.stringify({ + index, + data, + settings, + mappings, + ingestPipeline, + }); + + return getHttp().fetch({ + path: `/api/file_upload/import`, + method: 'POST', + query, + body, + }); +} diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer_factory.ts b/x-pack/plugins/file_upload/public/importer/importer_factory.ts similarity index 67% rename from x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer_factory.ts rename to x-pack/plugins/file_upload/public/importer/importer_factory.ts index 6646f967825fbb..8d9432c697fe14 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer_factory.ts +++ b/x-pack/plugins/file_upload/public/importer/importer_factory.ts @@ -7,14 +7,9 @@ import { MessageImporter } from './message_importer'; import { NdjsonImporter } from './ndjson_importer'; -import { ImportConfig } from './importer'; -import { FindFileStructureResponse } from '../../../../../../../common/types/file_datavisualizer'; +import { ImportFactoryOptions } from './types'; -export function importerFactory( - format: string, - results: FindFileStructureResponse, - settings: ImportConfig -) { +export function importerFactory(format: string, options: ImportFactoryOptions) { switch (format) { // delimited and semi-structured text are both handled by splitting the // file into messages, then sending these to ES for further processing // in an ingest pipeline in a single message field (like Filebeat does) case 'delimited': case 'semi_structured_text': - return new MessageImporter(results, settings); + return new MessageImporter(options); case 'ndjson': - return new NdjsonImporter(results, settings); + return new NdjsonImporter(); default: return; } diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/index.ts b/x-pack/plugins/file_upload/public/importer/index.ts similarity index 92% rename from x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/index.ts rename to x-pack/plugins/file_upload/public/importer/index.ts index 6d33a0eeb5ab3a..face822f91efba 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/index.ts +++ b/x-pack/plugins/file_upload/public/importer/index.ts @@ -6,3 +6,4 @@ */ export { importerFactory } from './importer_factory'; +export * from './types';
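(Review note, illustrative only — not part of the diff. `callImportRoute` above is now the importers' only transport; everything else flows through the `IImporter` methods. A sketch of the expected call sequence; the index name and config objects are placeholders assumed for this example, mirroring the literals `json_upload_and_parse.js` passes, not values the PR prescribes:)

```ts
import { importerFactory, ImportFactoryOptions } from './importer';
import { IngestPipeline, Mappings, Settings } from '../common';

// Hypothetical driver: parse a file's contents, create the index, then stream
// the parsed docs to /api/file_upload/import in CHUNK_SIZE batches.
export async function importNdjson(data: ArrayBuffer, index: string) {
  const options = ({ importConfig: {} } as unknown) as ImportFactoryOptions; // placeholder
  const importer = importerFactory('ndjson', options);
  if (!importer) {
    throw new Error('unsupported format');
  }

  importer.read(data); // fills the importer's doc array via _createDocs()

  // Placeholder shapes, cast loosely for the sketch; an empty pipeline means
  // initializeImport() skips ingest pipeline creation.
  const settings = ({ number_of_shards: 1 } as unknown) as Settings;
  const mappings = ({ properties: {} } as unknown) as Mappings;
  const pipeline = ({} as unknown) as IngestPipeline;

  const initResp = await importer.initializeImport(index, settings, mappings, pipeline);
  return importer.import(initResp.id, index, initResp.pipelineId, () => {});
}
```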
diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/message_importer.ts b/x-pack/plugins/file_upload/public/importer/message_importer.ts similarity index 83% rename from x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/message_importer.ts rename to x-pack/plugins/file_upload/public/importer/message_importer.ts index 8692e2b9cecd23..f3855340f87fa3 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/message_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/message_importer.ts @@ -5,25 +5,23 @@ * 2.0. */ -import { Importer, ImportConfig, CreateDocsResponse } from './importer'; -import { FindFileStructureResponse } from '../../../../../../../common/types/file_datavisualizer'; -import { Doc } from '../../../../../../../../file_upload/common'; +import { Importer } from './importer'; +import { Doc } from '../../common'; +import { CreateDocsResponse, ImportFactoryOptions } from './types'; export class MessageImporter extends Importer { private _excludeLinesRegex: RegExp | null; private _multilineStartRegex: RegExp | null; - constructor(results: FindFileStructureResponse, settings: ImportConfig) { - super(settings); + constructor(options: ImportFactoryOptions) { + super(); this._excludeLinesRegex = - results.exclude_lines_pattern === undefined - ? null - : new RegExp(results.exclude_lines_pattern); + options.excludeLinesPattern === undefined ? null : new RegExp(options.excludeLinesPattern); this._multilineStartRegex = - results.multiline_start_pattern === undefined + options.multilineStartPattern === undefined ? null - : new RegExp(results.multiline_start_pattern); + : new RegExp(options.multilineStartPattern); } // split the text into an array of lines by looking for newlines. diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/ndjson_importer.ts b/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts similarity index 83% rename from x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/ndjson_importer.ts rename to x-pack/plugins/file_upload/public/importer/ndjson_importer.ts index 661f3f9179e494..7129a07440cf33 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/ndjson_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts @@ -5,12 +5,12 @@ * 2.0. */ -import { Importer, ImportConfig, CreateDocsResponse } from './importer'; -import { FindFileStructureResponse } from '../../../../../../../common/types/file_datavisualizer'; +import { Importer } from './importer'; +import { CreateDocsResponse } from './types'; export class NdjsonImporter extends Importer { - constructor(results: FindFileStructureResponse, settings: ImportConfig) { - super(settings); + constructor() { + super(); } protected _createDocs(json: string): CreateDocsResponse {
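(Review note, illustrative only — not part of the diff. After this refactor the abstract `Importer` only demands `_createDocs`; a hypothetical minimal subclass shows the contract. This is a sketch, not the plugin's actual `NdjsonImporter`:)

```ts
import { Importer } from './importer';
import { CreateDocsResponse } from './types';

// Hypothetical importer: one JSON document per non-empty line.
export class LineDelimitedImporter extends Importer {
  protected _createDocs(text: string): CreateDocsResponse {
    try {
      const docs = text
        .split('\n')
        .filter((line) => line.trim().length > 0)
        .map((line) => JSON.parse(line));
      // `remainder` reports trailing characters of a partial line so read()
      // can carry them into the next chunk; 0 keeps the sketch simple.
      return { success: true, docs, remainder: 0 };
    } catch (error) {
      return { success: false, docs: [], remainder: 0, error };
    }
  }
}
```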
diff --git a/x-pack/plugins/file_upload/public/importer/types.ts b/x-pack/plugins/file_upload/public/importer/types.ts new file mode 100644 index 00000000000000..a2baee6b1dcd01 --- /dev/null +++ b/x-pack/plugins/file_upload/public/importer/types.ts @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { IngestPipeline, ImportDoc, ImportResponse, Mappings, Settings } from '../../common'; + +export interface ImportConfig { + settings: Settings; + mappings: Mappings; + pipeline: IngestPipeline; +} + +export interface ImportResults { + success: boolean; + failures?: any[]; + docCount?: number; + error?: any; +} + +export interface CreateDocsResponse { + success: boolean; + remainder: number; + docs: ImportDoc[]; + error?: any; +} + +export interface ImportFactoryOptions { + excludeLinesPattern?: string; + multilineStartPattern?: string; + importConfig: ImportConfig; +} + +export interface IImporter { + read(data: ArrayBuffer): { success: boolean }; + initializeImport( + index: string, + settings: Settings, + mappings: Mappings, + pipeline: IngestPipeline + ): Promise<ImportResponse>; + import( + id: string, + index: string, + pipelineId: string, + setImportProgress: (progress: number) => void + ): Promise<ImportResults>; +} diff --git a/x-pack/plugins/file_upload/public/index.ts b/x-pack/plugins/file_upload/public/index.ts index efabc984e0220d..0c81779130d874 100644 --- a/x-pack/plugins/file_upload/public/index.ts +++ b/x-pack/plugins/file_upload/public/index.ts @@ -13,5 +13,7 @@ export function plugin() { export * from '../common'; -export { StartContract } from './plugin'; -export { FileUploadComponentProps } from './get_file_upload_component'; +export * from './importer/types'; + +export { FileUploadPluginStart } from './plugin'; +export { FileUploadComponentProps } from './lazy_load_bundle'; diff --git a/x-pack/plugins/file_upload/public/kibana_services.js b/x-pack/plugins/file_upload/public/kibana_services.js deleted file mode 100644 index 88e1b16eb062ae..00000000000000 --- a/x-pack/plugins/file_upload/public/kibana_services.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -export let indexPatternService; -export let savedObjectsClient; -export let basePath; -export let kbnFetch; - -export const setupInitServicesAndConstants = ({ http }) => { - basePath = http.basePath.basePath; - kbnFetch = http.fetch; -}; - -export const startInitServicesAndConstants = ({ savedObjects }, { data }) => { - indexPatternService = data.indexPatterns; - savedObjectsClient = savedObjects.client; -};
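(Review note, illustrative only — not part of the diff. One practical effect of replacing the mutable exports above with the accessor functions in the new `kibana_services.ts` that follows: unit tests can prime the services before exercising code that calls `getHttp()`. The Kibana mock helpers used here are an assumption of this sketch:)

```ts
import { coreMock } from 'src/core/public/mocks';
import { dataPluginMock } from 'src/plugins/data/public/mocks';
import { setStartServices, getHttp, getIndexPatternService } from './kibana_services';

describe('kibana_services', () => {
  test('exposes start services after setStartServices is called', () => {
    // Prime the module-level services the same way plugin.start() does.
    setStartServices(coreMock.createStart(), {
      data: dataPluginMock.createStartContract(),
    });
    expect(getHttp()).toBeDefined();
    expect(getIndexPatternService()).toBeDefined();
  });
});
```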
diff --git a/x-pack/plugins/file_upload/public/kibana_services.ts b/x-pack/plugins/file_upload/public/kibana_services.ts new file mode 100644 index 00000000000000..c007c5c2273a8a --- /dev/null +++ b/x-pack/plugins/file_upload/public/kibana_services.ts @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { CoreStart } from 'kibana/public'; +import { FileUploadStartDependencies } from './plugin'; + +let coreStart: CoreStart; +let pluginsStart: FileUploadStartDependencies; +export function setStartServices(core: CoreStart, plugins: FileUploadStartDependencies) { + coreStart = core; + pluginsStart = plugins; +} + +export const getIndexPatternService = () => pluginsStart.data.indexPatterns; +export const getHttp = () => coreStart.http; +export const getSavedObjectsClient = () => coreStart.savedObjects.client; diff --git a/x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts b/x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts new file mode 100644 index 00000000000000..9cfc0896f5c2de --- /dev/null +++ b/x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; +import { FeatureCollection } from 'geojson'; +import { IndexPattern } from 'src/plugins/data/public'; +import { IImporter, ImportFactoryOptions, ImportResults } from '../importer'; + +export interface FileUploadComponentProps { + isIndexingTriggered: boolean; + onFileUpload: (geojsonFile: FeatureCollection, name: string) => void; + onFileRemove: () => void; + onIndexReady: (indexReady: boolean) => void; + onIndexingComplete: (results: { + indexDataResp: ImportResults; + indexPattern: IndexPattern; + }) => void; + onIndexingError: () => void; +} + +let loadModulesPromise: Promise<LazyLoadedFileUploadModules>; + +interface LazyLoadedFileUploadModules { + JsonUploadAndParse: React.ComponentType<FileUploadComponentProps>; + importerFactory: (format: string, options: ImportFactoryOptions) => IImporter | undefined; +} + +export async function lazyLoadFileUploadModules(): Promise<LazyLoadedFileUploadModules> { + if (typeof loadModulesPromise !== 'undefined') { + return loadModulesPromise; + } + + loadModulesPromise = new Promise(async (resolve) => { + const { JsonUploadAndParse, importerFactory } = await import('./lazy'); + + resolve({ + JsonUploadAndParse, + importerFactory, + }); + }); + return loadModulesPromise; } diff --git a/x-pack/plugins/file_upload/public/lazy_load_bundle/lazy/index.ts b/x-pack/plugins/file_upload/public/lazy_load_bundle/lazy/index.ts new file mode 100644 index 00000000000000..36df353f65d8c4 --- /dev/null +++ b/x-pack/plugins/file_upload/public/lazy_load_bundle/lazy/index.ts @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +// @ts-expect-error +export { JsonUploadAndParse } from '../../components/json_upload_and_parse'; +export { importerFactory } from '../../importer';
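(Review note, illustrative only — not part of the diff. The `loadModulesPromise` caching in `lazy_load_bundle/index.ts` above means the dynamic `import('./lazy')` — and with it `JsonUploadAndParse`, the importers, and `@loaders.gl` — is fetched at most once, however many callers race:)

```ts
import { lazyLoadFileUploadModules } from './lazy_load_bundle';

// Sketch: concurrent callers share one promise. The first call assigns
// loadModulesPromise synchronously, so the second call returns the cached
// promise instead of triggering another import('./lazy').
export async function demoSingleFetch() {
  const [first, second] = await Promise.all([
    lazyLoadFileUploadModules(),
    lazyLoadFileUploadModules(),
  ]);
  return first === second; // true
}
```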
diff --git a/x-pack/plugins/file_upload/public/plugin.ts b/x-pack/plugins/file_upload/public/plugin.ts index d66e249ce11736..5d3918193d48a1 100644 --- a/x-pack/plugins/file_upload/public/plugin.ts +++ b/x-pack/plugins/file_upload/public/plugin.ts @@ -5,34 +5,35 @@ * 2.0. */ -import React from 'react'; -import { CoreSetup, CoreStart, Plugin } from 'kibana/server'; -import { FileUploadComponentProps, getFileUploadComponent } from './get_file_upload_component'; -// @ts-ignore -import { setupInitServicesAndConstants, startInitServicesAndConstants } from './kibana_services'; -import { IDataPluginServices } from '../../../../src/plugins/data/public'; +import { CoreStart, Plugin } from '../../../../src/core/public'; +import { FileUploadStartApi, getFileUploadComponent, importerFactory } from './api'; +import { setStartServices } from './kibana_services'; +import { DataPublicPluginStart } from '../../../../src/plugins/data/public'; // eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface SetupDependencies {} -export interface StartDependencies { - data: IDataPluginServices; +export interface FileUploadSetupDependencies {} +export interface FileUploadStartDependencies { + data: DataPublicPluginStart; } -export type SetupContract = ReturnType<FileUploadPlugin['setup']>; -export interface StartContract { - getFileUploadComponent: () => Promise<React.ComponentType<FileUploadComponentProps>>; -} +export type FileUploadPluginSetup = ReturnType<FileUploadPlugin['setup']>; +export type FileUploadPluginStart = ReturnType<FileUploadPlugin['start']>; export class FileUploadPlugin - implements Plugin<SetupContract, StartContract, SetupDependencies, StartDependencies> { - public setup(core: CoreSetup, plugins: SetupDependencies) { - setupInitServicesAndConstants(core); - } + implements + Plugin< + FileUploadPluginSetup, + FileUploadPluginStart, + FileUploadSetupDependencies, + FileUploadStartDependencies + > { + public setup() {} - public start(core: CoreStart, plugins: StartDependencies) { - startInitServicesAndConstants(core, plugins); + public start(core: CoreStart, plugins: FileUploadStartDependencies): FileUploadStartApi { + setStartServices(core, plugins); return { getFileUploadComponent, + importerFactory, }; } } diff --git a/x-pack/plugins/file_upload/public/util/file_parser.js b/x-pack/plugins/file_upload/public/util/file_parser.js deleted file mode 100644 index 7488533bd63454..00000000000000 --- a/x-pack/plugins/file_upload/public/util/file_parser.js +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -import _ from 'lodash'; -import { geoJsonCleanAndValidate } from './geo_json_clean_and_validate'; -import { i18n } from '@kbn/i18n'; -import { JSONLoader } from '@loaders.gl/json'; -import { loadInBatches } from '@loaders.gl/core'; - -export const fileHandler = async ({ - file, - setFileProgress, - cleanAndValidate, - getFileParseActive, -}) => { - const filePromise = new Promise(async (resolve, reject) => { - if (!file) { - reject( - new Error( - i18n.translate('xpack.fileUpload.fileParser.noFileProvided', { - defaultMessage: 'Error, no file provided', - }) - ) - ); - return; - } - - const batches = await loadInBatches(file, JSONLoader, { - json: { - jsonpaths: ['$.features'], - _rootObjectBatches: true, - }, - }); - - let featuresProcessed = 0; - const features = []; - const errors = []; - let boolGeometryErrs = false; - let parsedGeojson; - for await (const batch of batches) { - if (getFileParseActive()) { - switch (batch.batchType) { - case 'root-object-batch-complete': - if (!getFileParseActive()) { - resolve(null); - return; - } - if (featuresProcessed) { - parsedGeojson = { ...batch.container, features }; - } else { - // Handle single feature geoJson - const cleanedSingleFeature = cleanAndValidate(batch.container); - if (cleanedSingleFeature.geometry && cleanedSingleFeature.geometry.type) { - parsedGeojson = cleanedSingleFeature; - featuresProcessed++; - } - } - break; - default: - for (const feature of batch.data) { - if (!feature.geometry || !feature.geometry.type) { - if (!boolGeometryErrs) { - boolGeometryErrs = true; - errors.push( - new Error( - i18n.translate('xpack.fileUpload.fileParser.featuresOmitted', { - defaultMessage: 'Some features without geometry omitted', - }) - ) - ); - } - } else { - const cleanFeature = cleanAndValidate(feature); - features.push(cleanFeature); - featuresProcessed++; - } - } - } - setFileProgress({ - featuresProcessed, - bytesProcessed: batch.bytesUsed, - totalBytes: file.size, - }); - } else { - break; - } - } - - if (!featuresProcessed && getFileParseActive()) { - reject( - new Error( - i18n.translate('xpack.fileUpload.fileParser.noFeaturesDetected', { - defaultMessage: 'Error, no features detected', - }) - ) - ); - } else if (!getFileParseActive()) { - resolve(null); - } else { - resolve({ - errors, - parsedGeojson, - }); - } - }); - - return filePromise; -}; - -export function jsonPreview(fileResults, previewFunction) { - if (fileResults && fileResults.parsedGeojson && previewFunction) { - const defaultName = _.get(fileResults.parsedGeojson, 'name', 'Import File'); - previewFunction(fileResults.parsedGeojson, defaultName); - } -} - -export async function parseFile({ - file, - transformDetails, - onFileUpload: previewCallback = null, - setFileProgress, - getFileParseActive, -}) { - let cleanAndValidate; - if (typeof transformDetails === 'object') { - cleanAndValidate = transformDetails.cleanAndValidate; - } else { - switch (transformDetails) { - case 'geo': - cleanAndValidate = geoJsonCleanAndValidate; - break; - default: - throw i18n.translate('xpack.fileUpload.fileParser.transformDetailsNotDefined', { - defaultMessage: 'Index options for {transformDetails} not defined', - values: { transformDetails }, - }); - } - } - - const fileResults = await fileHandler({ - file, - setFileProgress, - cleanAndValidate, - getFileParseActive, - }); - jsonPreview(fileResults, previewCallback); - return fileResults; -} diff --git a/x-pack/plugins/file_upload/public/util/file_parser.test.js b/x-pack/plugins/file_upload/public/util/file_parser.test.js 
deleted file mode 100644 index 876cec9a7cc654..00000000000000 --- a/x-pack/plugins/file_upload/public/util/file_parser.test.js +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { fileHandler } from './file_parser'; -import '@loaders.gl/polyfills'; - -const cleanAndValidate = jest.fn((a) => a); -const setFileProgress = jest.fn((a) => a); - -const testJson = { - type: 'Feature', - geometry: { - type: 'Polygon', - coordinates: [ - [ - [-104.05, 78.99], - [-87.22, 78.98], - [-86.58, 75.94], - [-104.03, 75.94], - [-104.05, 78.99], - ], - ], - }, -}; - -const getFileRef = (geoJsonObj = testJson) => { - const fileContent = JSON.stringify(geoJsonObj); - return new File([fileContent], 'test.json', { type: 'text/json' }); -}; - -const getFileParseActiveFactory = (boolActive = true) => { - return jest.fn(() => boolActive); -}; - -describe('parse file', () => { - afterEach(() => { - jest.resetAllMocks(); - jest.restoreAllMocks(); - }); - - it('should reject and throw error if no file provided', async () => { - await fileHandler({ file: null }).catch((e) => { - expect(e.message).toMatch('Error, no file provided'); - }); - }); - - it('should abort and resolve to null if file parse cancelled', async () => { - const fileRef = getFileRef(); - - // Cancel file parse - const getFileParseActive = getFileParseActiveFactory(false); - - const fileHandlerResult = await fileHandler({ - file: fileRef, - setFileProgress, - cleanAndValidate, - getFileParseActive, - }); - - expect(fileHandlerResult).toBeNull(); - }); - - it('should normally read single feature valid data', async () => { - const fileRef = getFileRef(); - const getFileParseActive = getFileParseActiveFactory(); - const { errors } = await fileHandler({ - file: fileRef, - setFileProgress, - cleanAndValidate: (x) => x, - getFileParseActive, - }); - - expect(setFileProgress.mock.calls.length).toEqual(1); - expect(errors.length).toEqual(0); - }); - - it('should normally read a valid single feature file', async () => { - const testSinglePointJson = { - type: 'Feature', - geometry: { - type: 'Point', - coordinates: [30, 10], - }, - properties: { - name: 'Point island', - }, - }; - - const fileRef = getFileRef(testSinglePointJson); - const getFileParseActive = getFileParseActiveFactory(); - const { errors } = await fileHandler({ - file: fileRef, - setFileProgress, - cleanAndValidate: (x) => x, - getFileParseActive, - }); - - expect(setFileProgress.mock.calls.length).toEqual(1); - expect(errors.length).toEqual(0); - }); - - it('should throw if no valid features', async () => { - const fileRef = getFileRef(); - const getFileParseActive = getFileParseActiveFactory(); - - await fileHandler({ - file: fileRef, - setFileProgress, - cleanAndValidate: () => ({ not: 'the correct content' }), // Simulate clean and validate fail - getFileParseActive, - }).catch((e) => { - expect(e.message).toMatch('Error, no features detected'); - }); - }); -}); diff --git a/x-pack/plugins/file_upload/public/util/geo_processing.js b/x-pack/plugins/file_upload/public/util/geo_processing.js deleted file mode 100644 index c90c55c2b49ac3..00000000000000 --- a/x-pack/plugins/file_upload/public/util/geo_processing.js +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import _ from 'lodash'; - -export const ES_GEO_FIELD_TYPE = { - GEO_POINT: 'geo_point', - GEO_SHAPE: 'geo_shape', -}; - -export function getGeoIndexTypesForFeatures(featureTypes) { - const hasNoFeatureType = !featureTypes || !featureTypes.length; - if (hasNoFeatureType) { - return []; - } - - const isPoint = featureTypes.includes('Point') || featureTypes.includes('MultiPoint'); - if (!isPoint) { - return [ES_GEO_FIELD_TYPE.GEO_SHAPE]; - } else if (isPoint && featureTypes.length === 1) { - return [ES_GEO_FIELD_TYPE.GEO_POINT, ES_GEO_FIELD_TYPE.GEO_SHAPE]; - } - return [ES_GEO_FIELD_TYPE.GEO_SHAPE]; -} - -// Reduces & flattens geojson to coordinates and properties (if any) -export function geoJsonToEs(parsedGeojson, datatype) { - if (!parsedGeojson) { - return []; - } - const features = parsedGeojson.type === 'Feature' ? [parsedGeojson] : parsedGeojson.features; - - if (datatype === ES_GEO_FIELD_TYPE.GEO_SHAPE) { - return features.reduce((accu, { geometry, properties }) => { - const { coordinates } = geometry; - accu.push({ - coordinates: { - type: geometry.type.toLowerCase(), - coordinates: coordinates, - }, - ...(!_.isEmpty(properties) ? { ...properties } : {}), - }); - return accu; - }, []); - } else if (datatype === ES_GEO_FIELD_TYPE.GEO_POINT) { - return features.reduce((accu, { geometry, properties }) => { - const { coordinates } = geometry; - accu.push({ - coordinates, - ...(!_.isEmpty(properties) ? { ...properties } : {}), - }); - return accu; - }, []); - } else { - return []; - } -} - -export function getGeoJsonIndexingDetails(parsedGeojson, dataType) { - return { - data: geoJsonToEs(parsedGeojson, dataType), - ingestPipeline: {}, - mappings: { - properties: { - coordinates: { - type: dataType, - }, - }, - }, - settings: { - number_of_shards: 1, - }, - }; -} diff --git a/x-pack/plugins/file_upload/public/util/geo_processing.test.js b/x-pack/plugins/file_upload/public/util/geo_processing.test.js deleted file mode 100644 index 37b665c0a3e162..00000000000000 --- a/x-pack/plugins/file_upload/public/util/geo_processing.test.js +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import { ES_GEO_FIELD_TYPE, geoJsonToEs } from './geo_processing'; - -describe('geo_processing', () => { - describe('getGeoJsonToEs', () => { - const parsedPointFeature = { - type: 'Feature', - geometry: { - type: 'Point', - coordinates: [105.7, 18.9], - }, - properties: { - name: 'Dogeville', - }, - }; - - it('should convert point feature to flattened ES compatible feature', () => { - const esFeatureArr = geoJsonToEs(parsedPointFeature, ES_GEO_FIELD_TYPE.GEO_POINT); - expect(esFeatureArr).toEqual([ - { - coordinates: [105.7, 18.9], - name: 'Dogeville', - }, - ]); - }); - - it('should convert point feature collection to flattened ES compatible feature', () => { - const parsedPointFeatureCollection = { - type: 'FeatureCollection', - features: [ - { - type: 'Feature', - geometry: { - type: 'Point', - coordinates: [34.1, 15.3], - }, - properties: { - name: 'Meowsers City', - }, - }, - ], - }; - - const esFeatureArr = geoJsonToEs(parsedPointFeatureCollection, ES_GEO_FIELD_TYPE.GEO_POINT); - expect(esFeatureArr).toEqual([ - { - coordinates: [34.1, 15.3], - name: 'Meowsers City', - }, - ]); - }); - - it('should convert shape feature to flattened ES compatible feature', () => { - const parsedShapeFeature = { - type: 'Feature', - geometry: { - type: 'Polygon', - coordinates: [ - [ - [-104.05, 78.99], - [-87.22, 78.98], - [-86.58, 75.94], - [-104.03, 75.94], - [-104.05, 78.99], - ], - ], - }, - properties: { - name: 'Whiskers City', - }, - }; - - const esFeatureArr = geoJsonToEs(parsedShapeFeature, ES_GEO_FIELD_TYPE.GEO_SHAPE); - expect(esFeatureArr).toEqual([ - { - coordinates: { - coordinates: [ - [ - [-104.05, 78.99], - [-87.22, 78.98], - [-86.58, 75.94], - [-104.03, 75.94], - [-104.05, 78.99], - ], - ], - type: 'polygon', - }, - name: 'Whiskers City', - }, - ]); - }); - - it('should convert shape feature collection to flattened ES compatible feature', () => { - const parsedShapeFeatureCollection = { - type: 'FeatureCollection', - features: [ - { - type: 'Feature', - geometry: { - type: 'Polygon', - coordinates: [ - [ - [-104.05, 79.89], - [-87.22, 79.88], - [-86.58, 74.84], - [-104.03, 75.84], - [-104.05, 78.89], - ], - ], - }, - properties: { - name: 'Woof Crossing', - }, - }, - ], - }; - - const esFeatureArr = geoJsonToEs(parsedShapeFeatureCollection, ES_GEO_FIELD_TYPE.GEO_SHAPE); - expect(esFeatureArr).toEqual([ - { - coordinates: { - coordinates: [ - [ - [-104.05, 79.89], - [-87.22, 79.88], - [-86.58, 74.84], - [-104.03, 75.84], - [-104.05, 78.89], - ], - ], - type: 'polygon', - }, - name: 'Woof Crossing', - }, - ]); - }); - - it('should return an empty for an unhandled datatype', () => { - const esFeatureArr = geoJsonToEs(parsedPointFeature, 'different datatype'); - expect(esFeatureArr).toEqual([]); - }); - }); -}); diff --git a/x-pack/plugins/file_upload/public/util/http_service.js b/x-pack/plugins/file_upload/public/util/http_service.js index c3c080ddce7920..33afebc514c369 100644 --- a/x-pack/plugins/file_upload/public/util/http_service.js +++ b/x-pack/plugins/file_upload/public/util/http_service.js @@ -6,7 +6,7 @@ */ import { i18n } from '@kbn/i18n'; -import { kbnFetch } from '../kibana_services'; +import { getHttp } from '../kibana_services'; export async function http(options) { if (!(options && options.url)) { @@ -38,7 +38,7 @@ export async function http(options) { async function doFetch(url, payload) { try { - return await kbnFetch(url, payload); + return await getHttp().fetch(url, payload); } catch (err) { return { failures: [ diff --git 
a/x-pack/plugins/file_upload/public/util/indexing_service.js b/x-pack/plugins/file_upload/public/util/indexing_service.js index 253681dad6a7da..cb9bc9a2e1ce65 100644 --- a/x-pack/plugins/file_upload/public/util/indexing_service.js +++ b/x-pack/plugins/file_upload/public/util/indexing_service.js @@ -6,205 +6,7 @@ */ import { http as httpService } from './http_service'; -import { indexPatternService, savedObjectsClient } from '../kibana_services'; -import { getGeoJsonIndexingDetails } from './geo_processing'; -import { sizeLimitedChunking } from './size_limited_chunking'; -import { i18n } from '@kbn/i18n'; - -export async function indexData(parsedFile, transformDetails, indexName, dataType, appName) { - if (!parsedFile) { - throw i18n.translate('xpack.fileUpload.indexingService.noFileImported', { - defaultMessage: 'No file imported.', - }); - } - - // Perform any processing required on file prior to indexing - const transformResult = transformDataByFormatForIndexing(transformDetails, parsedFile, dataType); - if (!transformResult.success) { - throw i18n.translate('xpack.fileUpload.indexingService.transformResultError', { - defaultMessage: 'Error transforming data: {error}', - values: { error: transformResult.error }, - }); - } - - // Create new index - const { indexingDetails } = transformResult; - const createdIndex = await writeToIndex({ - appName, - ...indexingDetails, - id: undefined, - data: [], - index: indexName, - }); - const id = createdIndex && createdIndex.id; - try { - if (!id) { - throw i18n.translate('xpack.fileUpload.indexingService.errorCreatingIndex', { - defaultMessage: 'Error creating index', - }); - } - } catch (error) { - return { - error, - success: false, - }; - } - - // Write to index - const indexWriteResults = await chunkDataAndWriteToIndex({ - id, - index: indexName, - ...indexingDetails, - settings: {}, - mappings: {}, - }); - return indexWriteResults; -} - -function transformDataByFormatForIndexing(transform, parsedFile, dataType) { - let indexingDetails; - if (!transform) { - return { - success: false, - error: i18n.translate('xpack.fileUpload.indexingService.noTransformDefined', { - defaultMessage: 'No transform defined', - }), - }; - } - if (typeof transform !== 'object') { - switch (transform) { - case 'geo': - indexingDetails = getGeoJsonIndexingDetails(parsedFile, dataType); - break; - default: - return { - success: false, - error: i18n.translate('xpack.fileUpload.indexingService.noHandlingForTransform', { - defaultMessage: 'No handling defined for transform: {transform}', - values: { transform }, - }), - }; - } - } else { - // Custom transform - indexingDetails = transform.getIndexingDetails(parsedFile); - } - if (indexingDetails && indexingDetails.data && indexingDetails.data.length) { - return { - success: true, - indexingDetails, - }; - } else if (indexingDetails && indexingDetails.data) { - return { - success: false, - error: i18n.translate('xpack.fileUpload.indexingService.noIndexingDetailsForDatatype', { - defaultMessage: `No indexing details defined for datatype: {dataType}`, - values: { dataType }, - }), - }; - } else { - return { - success: false, - error: i18n.translate('xpack.fileUpload.indexingService.unknownTransformError', { - defaultMessage: 'Unknown error performing transform: {transform}', - values: { transform }, - }), - }; - } -} - -async function writeToIndex(indexingDetails) { - const query = indexingDetails.id ? 
{ id: indexingDetails.id } : null; - const { index, data, settings, mappings, ingestPipeline } = indexingDetails; - - return await httpService({ - url: `/api/file_upload/import`, - method: 'POST', - ...(query ? { query } : {}), - data: { - index, - data, - settings, - mappings, - ingestPipeline, - }, - }); -} - -async function chunkDataAndWriteToIndex({ id, index, data, mappings, settings }) { - if (!index) { - return { - success: false, - error: i18n.translate('xpack.fileUpload.noIndexSuppliedErrorMessage', { - defaultMessage: 'No index provided.', - }), - }; - } - - const chunks = sizeLimitedChunking(data); - - let success = true; - let failures = []; - let error; - let docCount = 0; - - for (let i = 0; i < chunks.length; i++) { - const aggs = { - id, - index, - data: chunks[i], - settings, - mappings, - ingestPipeline: {}, // TODO: Support custom ingest pipelines - }; - - let resp = { - success: false, - failures: [], - docCount: 0, - }; - resp = await writeToIndex(aggs); - - failures = [...failures, ...resp.failures]; - if (resp.success) { - ({ success } = resp); - docCount = docCount + resp.docCount; - } else { - success = false; - error = resp.error; - docCount = 0; - break; - } - } - - return { - success, - failures, - docCount, - ...(error ? { error } : {}), - }; -} - -export async function createIndexPattern(indexPatternName) { - try { - const indexPattern = await indexPatternService.createAndSave( - { - title: indexPatternName, - }, - true - ); - return { - success: true, - id: indexPattern.id, - fields: indexPattern.fields, - }; - } catch (error) { - return { - success: false, - error, - }; - } -} +import { getSavedObjectsClient } from '../kibana_services'; export const getExistingIndexNames = async () => { const indexes = await httpService({ @@ -215,7 +17,7 @@ export const getExistingIndexNames = async () => { }; export const getExistingIndexPatternNames = async () => { - const indexPatterns = await savedObjectsClient + const indexPatterns = await getSavedObjectsClient() .find({ type: 'index-pattern', fields: ['id', 'title', 'type', 'fields'], diff --git a/x-pack/plugins/file_upload/public/util/size_limited_chunking.js b/x-pack/plugins/file_upload/public/util/size_limited_chunking.js deleted file mode 100644 index 09d4e8ca8e3a2e..00000000000000 --- a/x-pack/plugins/file_upload/public/util/size_limited_chunking.js +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -const MAX_BYTES = 31457280; - -// MAX_BYTES is a good guideline for splitting up posts, but this logic -// occasionally sizes chunks so closely to the limit, that the remaining content -// of a post (besides features) tips it over the max. 
Adding a 2MB buffer -// to ensure this doesn't happen -const CHUNK_BUFFER = 2097152; - -// Add data elements to chunk until limit is met -export function sizeLimitedChunking(dataArr, maxByteSize = MAX_BYTES - CHUNK_BUFFER) { - let chunkSize = 0; - - return dataArr.reduce( - (accu, el) => { - const featureByteSize = new Blob([JSON.stringify(el)], { type: 'application/json' }).size; - if (featureByteSize > maxByteSize) { - throw `Some features exceed maximum chunk size of ${maxByteSize}`; - } else if (chunkSize + featureByteSize < maxByteSize) { - const lastChunkRef = accu.length - 1; - chunkSize += featureByteSize; - accu[lastChunkRef].push(el); - } else { - chunkSize = featureByteSize; - accu.push([el]); - } - return accu; - }, - [[]] - ); -} diff --git a/x-pack/plugins/file_upload/public/util/size_limited_chunking.test.js b/x-pack/plugins/file_upload/public/util/size_limited_chunking.test.js deleted file mode 100644 index a87c7a93ad83ae..00000000000000 --- a/x-pack/plugins/file_upload/public/util/size_limited_chunking.test.js +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { sizeLimitedChunking } from './size_limited_chunking'; - -describe('size_limited_chunking', () => { - // 1000 elements where element value === index - const testArr = Array.from(Array(1000), (_, x) => x); - - it('should limit each sub-array to the max chunk size', () => { - // Confirm valid geometry - const chunkLimit = 100; - const chunkedArr = sizeLimitedChunking(testArr, chunkLimit); - chunkedArr.forEach((sizeLimitedArr) => { - const arrByteSize = new Blob(sizeLimitedArr, { type: 'application/json' }).size; - - // Chunk size should be less than chunk limit - expect(arrByteSize).toBeLessThan(chunkLimit); - // # of arrays generated should be greater than original array length - // divided by chunk limit - expect(chunkedArr.length).toBeGreaterThanOrEqual(testArr.length / chunkLimit); - }); - }); -}); diff --git a/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx b/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx index 44a22f1529f189..138ed7a8cd0b14 100644 --- a/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx +++ b/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx @@ -8,7 +8,7 @@ import React, { Component } from 'react'; import { FeatureCollection } from 'geojson'; import { EuiPanel } from '@elastic/eui'; -import { IFieldType } from 'src/plugins/data/public'; +import { IndexPattern, IFieldType } from 'src/plugins/data/public'; import { ES_GEO_FIELD_TYPE, DEFAULT_MAX_RESULT_WINDOW, @@ -19,7 +19,7 @@ import { GeoJsonFileSource } from '../../sources/geojson_file_source'; import { VectorLayer } from '../../layers/vector_layer'; import { createDefaultLayerDescriptor } from '../../sources/es_search_source'; import { RenderWizardArguments } from '../../layers/layer_wizard_registry'; -import { FileUploadComponentProps } from '../../../../../file_upload/public'; +import { FileUploadComponentProps, ImportResults } from '../../../../../file_upload/public'; export const INDEX_SETUP_STEP_ID = 'INDEX_SETUP_STEP_ID'; export const INDEXING_STEP_ID = 'INDEXING_STEP_ID'; @@ -91,43 +91,28 @@ export class ClientFileCreateSourceEditor extends Component { + _onIndexingComplete = (results: { indexDataResp: 
ImportResults; indexPattern: IndexPattern }) => { if (!this._isMounted) { return; } this.props.advanceToNextStep(); - const { indexDataResp, indexPatternResp } = indexResponses; - - // @ts-ignore - const indexCreationFailed = !(indexDataResp && indexDataResp.success); - // @ts-ignore - const allDocsFailed = indexDataResp.failures.length === indexDataResp.docCount; - // @ts-ignore - const indexPatternCreationFailed = !(indexPatternResp && indexPatternResp.success); - if (indexCreationFailed || allDocsFailed || indexPatternCreationFailed) { - this.setState({ indexingStage: INDEXING_STAGE.ERROR }); - return; - } - - // @ts-ignore - const { fields, id: indexPatternId } = indexPatternResp; - const geoField = fields.find((field: IFieldType) => + const geoField = results.indexPattern.fields.find((field: IFieldType) => [ES_GEO_FIELD_TYPE.GEO_POINT as string, ES_GEO_FIELD_TYPE.GEO_SHAPE as string].includes( field.type ) ); - if (!indexPatternId || !geoField) { + if (!results.indexPattern.id || !geoField) { this.setState({ indexingStage: INDEXING_STAGE.ERROR }); this.props.previewLayers([]); } else { const esSearchSourceConfig = { - indexPatternId, + indexPatternId: results.indexPattern.id, geoField: geoField.name, // Only turn on bounds filter for large doc counts // @ts-ignore - filterByMapBounds: indexDataResp.docCount > DEFAULT_MAX_RESULT_WINDOW, + filterByMapBounds: results.indexDataResp.docCount > DEFAULT_MAX_RESULT_WINDOW, scalingType: geoField.type === ES_GEO_FIELD_TYPE.GEO_POINT ? SCALING_TYPES.CLUSTERS @@ -140,6 +125,16 @@ export class ClientFileCreateSourceEditor extends Component { + if (!this._isMounted) { + return; + } + + this.props.advanceToNextStep(); + + this.setState({ indexingStage: INDEXING_STAGE.ERROR }); + }; + // Called on file upload screen when UI state changes _onIndexReady = (indexReady: boolean) => { if (!this._isMounted) { @@ -167,13 +162,12 @@ export class ClientFileCreateSourceEditor extends Component ); diff --git a/x-pack/plugins/maps/public/plugin.ts b/x-pack/plugins/maps/public/plugin.ts index 12cff9edf55ff6..d423acf67bcd47 100644 --- a/x-pack/plugins/maps/public/plugin.ts +++ b/x-pack/plugins/maps/public/plugin.ts @@ -54,7 +54,7 @@ import { EmbeddableStart } from '../../../../src/plugins/embeddable/public'; import { MapsLegacyConfig } from '../../../../src/plugins/maps_legacy/config'; import { DataPublicPluginStart } from '../../../../src/plugins/data/public'; import { LicensingPluginSetup, LicensingPluginStart } from '../../licensing/public'; -import { StartContract as FileUploadStartContract } from '../../file_upload/public'; +import { FileUploadPluginStart } from '../../file_upload/public'; import { SavedObjectsStart } from '../../../../src/plugins/saved_objects/public'; import { PresentationUtilPluginStart } from '../../../../src/plugins/presentation_util/public'; import { @@ -80,7 +80,7 @@ export interface MapsPluginStartDependencies { charts: ChartsPluginStart; data: DataPublicPluginStart; embeddable: EmbeddableStart; - fileUpload: FileUploadStartContract; + fileUpload: FileUploadPluginStart; inspector: InspectorStartContract; licensing: LicensingPluginStart; navigation: NavigationPublicPluginStart; diff --git a/x-pack/plugins/ml/public/application/app.tsx b/x-pack/plugins/ml/public/application/app.tsx index 3df67bc16ab058..107bbda23ecb90 100644 --- a/x-pack/plugins/ml/public/application/app.tsx +++ b/x-pack/plugins/ml/public/application/app.tsx @@ -124,6 +124,7 @@ export const renderApp = ( security: deps.security, urlGenerators: deps.share.urlGenerators, 
maps: deps.maps, + fileUpload: deps.fileUpload, }); appMountParams.onAppLeave((actions) => actions.default()); diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js index 0aadf9e17f30db..28f4f2e2ba9e44 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js +++ b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js @@ -20,7 +20,7 @@ import { import { i18n } from '@kbn/i18n'; import { debounce } from 'lodash'; -import { importerFactory } from './importer'; +import { getFileUpload } from '../../../../util/dependency_cache'; import { ResultsLinks } from '../results_links'; import { FilebeatConfigFlyout } from '../filebeat_config_flyout'; import { ImportProgress, IMPORT_STATUS } from '../import_progress'; @@ -187,15 +187,9 @@ export class ImportView extends Component { errors.push(`${parseError} ${error.message}`); } - const indexCreationSettings = { - settings, - mappings, - }; - try { if (createPipeline) { pipeline = JSON.parse(pipelineString); - indexCreationSettings.pipeline = pipeline; } } catch (error) { success = false; @@ -222,7 +216,10 @@ export class ImportView extends Component { } if (success) { - const importer = importerFactory(format, results, indexCreationSettings); + const importer = await getFileUpload().importerFactory(format, { + excludeLinesPattern: results.exclude_lines_pattern, + multilineStartPattern: results.multiline_start_pattern, + }); if (importer !== undefined) { const readResp = importer.read(data, this.setReadProgress); success = readResp.success; @@ -237,7 +234,12 @@ export class ImportView extends Component { } if (success) { - const initializeImportResp = await importer.initializeImport(index); + const initializeImportResp = await importer.initializeImport( + index, + settings, + mappings, + pipeline + ); const indexCreated = initializeImportResp.index !== undefined; this.setState({ diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/utils/utils.ts b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/utils/utils.ts index 4412390d62c1fc..2c1b02b53354aa 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/utils/utils.ts +++ b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/utils/utils.ts @@ -11,6 +11,7 @@ import numeral from '@elastic/numeral'; import { ml } from '../../../../services/ml_api_service'; import { AnalysisResult, InputOverrides } from '../../../../../../common/types/file_datavisualizer'; import { + MB, MAX_FILE_SIZE, MAX_FILE_SIZE_BYTES, ABSOLUTE_MAX_FILE_SIZE_BYTES, @@ -49,7 +50,7 @@ export function readFile(file: File) { if (data === null || typeof data === 'string') { return reject(); } - const size = UPLOAD_SIZE_MB * Math.pow(2, 20); + const size = UPLOAD_SIZE_MB * MB; const fileContents = decoder.decode(data.slice(0, size)); if (fileContents === '') { diff --git a/x-pack/plugins/ml/public/application/services/ml_api_service/datavisualizer.ts b/x-pack/plugins/ml/public/application/services/ml_api_service/datavisualizer.ts index ce647b5401b0bb..98a0d7b9b0a94f 100644 --- a/x-pack/plugins/ml/public/application/services/ml_api_service/datavisualizer.ts +++ b/x-pack/plugins/ml/public/application/services/ml_api_service/datavisualizer.ts @@ -8,7 +8,6 @@ import { http } 
from '../http_service'; import { basePath } from './index'; -import { ImportResponse } from '../../../../../file_upload/common'; export const fileDatavisualizer = { analyzeFile(file: string, params: Record = {}) { @@ -20,36 +19,4 @@ export const fileDatavisualizer = { query: params, }); }, - - import({ - id, - index, - data, - settings, - mappings, - ingestPipeline, - }: { - id: string | undefined; - index: string; - data: any; - settings: any; - mappings: any; - ingestPipeline: any; - }) { - const query = id !== undefined ? { id } : {}; - const body = JSON.stringify({ - index, - data, - settings, - mappings, - ingestPipeline, - }); - - return http({ - path: `/api/file_upload/import`, - method: 'POST', - query, - body, - }); - }, }; diff --git a/x-pack/plugins/ml/public/application/util/dependency_cache.ts b/x-pack/plugins/ml/public/application/util/dependency_cache.ts index 17e5c50c4b00c8..215f087020d6fa 100644 --- a/x-pack/plugins/ml/public/application/util/dependency_cache.ts +++ b/x-pack/plugins/ml/public/application/util/dependency_cache.ts @@ -23,6 +23,7 @@ import type { IndexPatternsContract, DataPublicPluginStart } from 'src/plugins/d import type { SharePluginStart } from 'src/plugins/share/public'; import type { SecurityPluginSetup } from '../../../../security/public'; import type { MapsStartApi } from '../../../../maps/public'; +import type { FileUploadPluginStart } from '../../../../file_upload/public'; export interface DependencyCache { timefilter: DataPublicPluginSetup['query']['timefilter'] | null; @@ -43,6 +44,7 @@ export interface DependencyCache { i18n: I18nStart | null; urlGenerators: SharePluginStart['urlGenerators'] | null; maps: MapsStartApi | null; + fileUpload: FileUploadPluginStart | null; } const cache: DependencyCache = { @@ -64,6 +66,7 @@ const cache: DependencyCache = { i18n: null, urlGenerators: null, maps: null, + fileUpload: null, }; export function setDependencyCache(deps: Partial) { @@ -84,6 +87,7 @@ export function setDependencyCache(deps: Partial) { cache.security = deps.security || null; cache.i18n = deps.i18n || null; cache.urlGenerators = deps.urlGenerators || null; + cache.fileUpload = deps.fileUpload || null; } export function getTimefilter() { @@ -209,3 +213,10 @@ export function clearCache() { cache[k as keyof DependencyCache] = null; }); } + +export function getFileUpload() { + if (cache.fileUpload === null) { + throw new Error("fileUpload hasn't been initialized"); + } + return cache.fileUpload; +} diff --git a/x-pack/plugins/ml/public/plugin.ts b/x-pack/plugins/ml/public/plugin.ts index 212d6fe13a6b4b..f6d5da92f5e715 100644 --- a/x-pack/plugins/ml/public/plugin.ts +++ b/x-pack/plugins/ml/public/plugin.ts @@ -52,6 +52,7 @@ import { TriggersAndActionsUIPublicPluginStart, } from '../../triggers_actions_ui/public'; import { registerMlAlerts } from './alerting/register_ml_alerts'; +import { FileUploadPluginStart } from '../../file_upload/public'; export interface MlStartDependencies { data: DataPublicPluginStart; @@ -63,6 +64,7 @@ export interface MlStartDependencies { maps?: MapsStartApi; lens?: LensPublicStart; triggersActionsUi?: TriggersAndActionsUIPublicPluginStart; + fileUpload: FileUploadPluginStart; } export interface MlSetupDependencies { @@ -119,6 +121,7 @@ export class MlPlugin implements Plugin { lens: pluginsStart.lens, kibanaVersion, triggersActionsUi: pluginsStart.triggersActionsUi, + fileUpload: pluginsStart.fileUpload, }, params ); diff --git a/x-pack/plugins/translations/translations/ja-JP.json 
b/x-pack/plugins/translations/translations/ja-JP.json index 384088f3b0512f..bf7b22122c9047 100644 --- a/x-pack/plugins/translations/translations/ja-JP.json +++ b/x-pack/plugins/translations/translations/ja-JP.json @@ -7442,19 +7442,10 @@ "xpack.features.savedObjectsManagementFeatureName": "保存されたオブジェクトの管理", "xpack.features.visualizeFeatureName": "可視化", "xpack.fileUpload.enterIndexName": "インデックス名を入力", - "xpack.fileUpload.fileParser.featuresOmitted": "ジオメトリのない一部の機能は省略されました", "xpack.fileUpload.fileParser.noFeaturesDetected": "エラー、機能が検出されませんでした", "xpack.fileUpload.fileParser.noFileProvided": "エラー、ファイルが提供されていません", - "xpack.fileUpload.fileParser.transformDetailsNotDefined": "{transformDetails}のインデックスオプションが定義されていません", "xpack.fileUpload.httpService.fetchError": "フェッチ実行エラー:{error}", "xpack.fileUpload.httpService.noUrl": "URLが指定されていません", - "xpack.fileUpload.indexingService.errorCreatingIndex": "インデックスの作成中にエラーが発生しました", - "xpack.fileUpload.indexingService.noFileImported": "ファイルはインポートされていません。", - "xpack.fileUpload.indexingService.noHandlingForTransform": "変換の処理が定義されていません。{transform}", - "xpack.fileUpload.indexingService.noIndexingDetailsForDatatype": "データ型のインデックス詳細が定義されていません。{dataType}", - "xpack.fileUpload.indexingService.noTransformDefined": "変換が定義されていません", - "xpack.fileUpload.indexingService.transformResultError": "データの変換エラー:{error}", - "xpack.fileUpload.indexingService.unknownTransformError": "変換の実行中に不明なエラーが発生しました。{transform}", "xpack.fileUpload.indexNameReqField": "インデックス名、必須フィールド", "xpack.fileUpload.indexSettings.enterIndexNameLabel": "インデックス名", "xpack.fileUpload.indexSettings.enterIndexTypeLabel": "インデックスタイプ", @@ -7479,7 +7470,6 @@ "xpack.fileUpload.jsonIndexFilePicker.filePicker": "ファイルをアップロード", "xpack.fileUpload.jsonIndexFilePicker.filePickerLabel": "アップロードするファイルを選択", "xpack.fileUpload.jsonIndexFilePicker.fileProcessingError": "ファイル処理エラー: {errorMessage}", - "xpack.fileUpload.jsonIndexFilePicker.fileSizeError": "ファイルサイズエラー:{errorMessage}", "xpack.fileUpload.jsonIndexFilePicker.formatsAccepted": "許可されているフォーマット:{acceptedFileTypeStringMessage}", "xpack.fileUpload.jsonIndexFilePicker.maxSize": "最大サイズ:{maxFileSize}", "xpack.fileUpload.jsonIndexFilePicker.noFileNameError": "ファイル名が指定されていません", @@ -7492,7 +7482,6 @@ "xpack.fileUpload.jsonUploadAndParse.indexPatternComplete": "インデックスパターンの完了", "xpack.fileUpload.jsonUploadAndParse.indexPatternError": "インデックスパターンエラー", "xpack.fileUpload.jsonUploadAndParse.writingToIndex": "インデックスに書き込み中", - "xpack.fileUpload.noIndexSuppliedErrorMessage": "インデックスが指定されていません。", "xpack.fleet.agentBulkActions.agentsSelected": "{count, plural, other {#個のエージェント}}が選択されました", "xpack.fleet.agentBulkActions.clearSelection": "選択した項目をクリア", "xpack.fleet.agentBulkActions.reassignPolicy": "新しいポリシーに割り当てる", @@ -13141,7 +13130,6 @@ "xpack.ml.fileDatavisualizer.importView.indexNameContainsIllegalCharactersErrorMessage": "インデックス名に許可されていない文字が含まれています", "xpack.ml.fileDatavisualizer.importView.indexPatternDoesNotMatchIndexNameErrorMessage": "インデックスパターンがインデックス名と一致しません", "xpack.ml.fileDatavisualizer.importView.indexPatternNameAlreadyExistsErrorMessage": "インデックスパターン名が既に存在します", - "xpack.ml.fileDatavisualizer.importView.noIdOrIndexSuppliedErrorMessage": "ID またはインデックスが提供されていません", "xpack.ml.fileDatavisualizer.importView.parseMappingsError": "マッピングのパース中にエラーが発生しました:", "xpack.ml.fileDatavisualizer.importView.parsePipelineError": "投入パイプラインのパース中にエラーが発生しました:", "xpack.ml.fileDatavisualizer.importView.parseSettingsError": "設定のパース中にエラーが発生しました:", diff --git a/x-pack/plugins/translations/translations/zh-CN.json 
b/x-pack/plugins/translations/translations/zh-CN.json index a5662a26253091..cde2aaf616dd4a 100644 --- a/x-pack/plugins/translations/translations/zh-CN.json +++ b/x-pack/plugins/translations/translations/zh-CN.json @@ -7461,19 +7461,10 @@ "xpack.features.savedObjectsManagementFeatureName": "已保存对象管理", "xpack.features.visualizeFeatureName": "Visualize", "xpack.fileUpload.enterIndexName": "输入索引名称", - "xpack.fileUpload.fileParser.featuresOmitted": "不具有几何形状的一些特征已省略", "xpack.fileUpload.fileParser.noFeaturesDetected": "错误,未检测到特征", "xpack.fileUpload.fileParser.noFileProvided": "错误,未提供任何文件", - "xpack.fileUpload.fileParser.transformDetailsNotDefined": "未定义 {transformDetails} 的索引选项", "xpack.fileUpload.httpService.fetchError": "执行提取时出错:{error}", "xpack.fileUpload.httpService.noUrl": "未提供 URL", - "xpack.fileUpload.indexingService.errorCreatingIndex": "创建索引时出错", - "xpack.fileUpload.indexingService.noFileImported": "未导入任何文件。", - "xpack.fileUpload.indexingService.noHandlingForTransform": "没有为转换 {transform} 定义任何处理方式", - "xpack.fileUpload.indexingService.noIndexingDetailsForDatatype": "没有为数据类型 {dataType} 定义任何索引详情", - "xpack.fileUpload.indexingService.noTransformDefined": "未定义任何转换", - "xpack.fileUpload.indexingService.transformResultError": "转换数据时出错:{error}", - "xpack.fileUpload.indexingService.unknownTransformError": "执行转换 {transform} 时出现未知错误", "xpack.fileUpload.indexNameReqField": "索引名称,必填字段", "xpack.fileUpload.indexSettings.enterIndexNameLabel": "索引名称", "xpack.fileUpload.indexSettings.enterIndexTypeLabel": "索引类型", @@ -7498,7 +7489,6 @@ "xpack.fileUpload.jsonIndexFilePicker.filePicker": "上传文件", "xpack.fileUpload.jsonIndexFilePicker.filePickerLabel": "选择文件进行上传", "xpack.fileUpload.jsonIndexFilePicker.fileProcessingError": "文件处理错误:{errorMessage}", - "xpack.fileUpload.jsonIndexFilePicker.fileSizeError": "文件大小错误:{errorMessage}", "xpack.fileUpload.jsonIndexFilePicker.formatsAccepted": "接受的格式:{acceptedFileTypeStringMessage}", "xpack.fileUpload.jsonIndexFilePicker.maxSize": "最大大小:{maxFileSize}", "xpack.fileUpload.jsonIndexFilePicker.noFileNameError": "未提供任何文件名称", @@ -7511,7 +7501,6 @@ "xpack.fileUpload.jsonUploadAndParse.indexPatternComplete": "索引模式完成", "xpack.fileUpload.jsonUploadAndParse.indexPatternError": "索引模式错误", "xpack.fileUpload.jsonUploadAndParse.writingToIndex": "正在写入索引", - "xpack.fileUpload.noIndexSuppliedErrorMessage": "未提供任何索引。", "xpack.fleet.agentBulkActions.agentsSelected": "已选择 {count, plural, other {# 个代理}}", "xpack.fleet.agentBulkActions.clearSelection": "清除所选内容", "xpack.fleet.agentBulkActions.reassignPolicy": "分配到新策略", @@ -13172,7 +13161,6 @@ "xpack.ml.fileDatavisualizer.importView.indexNameContainsIllegalCharactersErrorMessage": "索引名称包含非法字符", "xpack.ml.fileDatavisualizer.importView.indexPatternDoesNotMatchIndexNameErrorMessage": "索引模式与索引名称不匹配", "xpack.ml.fileDatavisualizer.importView.indexPatternNameAlreadyExistsErrorMessage": "索引模式名称已存在", - "xpack.ml.fileDatavisualizer.importView.noIdOrIndexSuppliedErrorMessage": "未提供任何 ID 或索引", "xpack.ml.fileDatavisualizer.importView.parseMappingsError": "解析映射时出错:", "xpack.ml.fileDatavisualizer.importView.parsePipelineError": "解析采集管道时出错:", "xpack.ml.fileDatavisualizer.importView.parseSettingsError": "解析设置时出错:", diff --git a/x-pack/test/functional/apps/maps/import_geojson/add_layer_import_panel.js b/x-pack/test/functional/apps/maps/import_geojson/add_layer_import_panel.js index 46b87b1c4195c3..b40f9a4bc233e4 100644 --- a/x-pack/test/functional/apps/maps/import_geojson/add_layer_import_panel.js +++ 
b/x-pack/test/functional/apps/maps/import_geojson/add_layer_import_panel.js @@ -11,7 +11,6 @@ import path from 'path'; export default function ({ getPageObjects, getService }) { const PageObjects = getPageObjects(['maps', 'common']); - const IMPORT_FILE_PREVIEW_NAME = 'Import File'; const FILE_LOAD_DIR = 'test_upload_files'; const DEFAULT_LOAD_FILE_NAME = 'point.json'; const security = getService('security'); @@ -39,8 +38,8 @@ export default function ({ getPageObjects, getService }) { }); it('should add GeoJSON file to map', async () => { - const layerLoadedInToc = await PageObjects.maps.doesLayerExist(IMPORT_FILE_PREVIEW_NAME); - expect(layerLoadedInToc).to.be(true); + const numberOfLayers = await PageObjects.maps.getNumberOfLayers(); + expect(numberOfLayers).to.be(2); const filePickerLoadedFile = await PageObjects.maps.hasFilePickerLoadedFile( DEFAULT_LOAD_FILE_NAME @@ -51,9 +50,9 @@ export default function ({ getPageObjects, getService }) { it('should remove layer on cancel', async () => { await PageObjects.maps.cancelLayerAdd(); - await PageObjects.maps.waitForLayerDeleted(IMPORT_FILE_PREVIEW_NAME); - const layerLoadedInToc = await PageObjects.maps.doesLayerExist(IMPORT_FILE_PREVIEW_NAME); - expect(layerLoadedInToc).to.be(false); + await PageObjects.maps.waitForLayerDeleted('point'); + const numberOfLayers = await PageObjects.maps.getNumberOfLayers(); + expect(numberOfLayers).to.be(1); }); it('should replace layer on input change', async () => { @@ -83,8 +82,8 @@ export default function ({ getPageObjects, getService }) { ); expect(filePickerLoadedFile).to.be(true); // Check that no file is loaded in layer preview - const layerLoadedInToc = await PageObjects.maps.doesLayerExist(IMPORT_FILE_PREVIEW_NAME); - expect(layerLoadedInToc).to.be(false); + const numberOfLayers = await PageObjects.maps.getNumberOfLayers(); + expect(numberOfLayers).to.be(1); }); it('should prevent import button from activating unless valid index name provided', async () => { diff --git a/x-pack/test/functional/page_objects/gis_page.ts b/x-pack/test/functional/page_objects/gis_page.ts index 95c7fe7a2dbc05..abf8f0238f1666 100644 --- a/x-pack/test/functional/page_objects/gis_page.ts +++ b/x-pack/test/functional/page_objects/gis_page.ts @@ -335,6 +335,11 @@ export function GisPageProvider({ getService, getPageObjects }: FtrProviderConte } } + async getNumberOfLayers() { + const tocEntries = await find.allByCssSelector('.mapTocEntry'); + return tocEntries.length; + } + async doesLayerExist(layerName: string) { return await testSubjects.exists( `layerTocActionsPanelToggleButton${escapeLayerName(layerName)}`
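

A note on the recurring pattern in the hunks above: http_service.js and indexing_service.js both stop importing concrete singletons (kbnFetch, savedObjectsClient) from kibana_services and call getter functions (getHttp, getSavedObjectsClient) instead, resolving dependencies at call time rather than at module load, which is what allows the bundle to be lazy-loaded. A minimal sketch of that getter pattern follows; only the getters appear in this diff, so the companion setter shown here is an assumed convention, not code from this PR:

// kibana_services sketch; setHttp is a hypothetical companion to the
// getHttp getter that the http_service.js hunk actually calls.
import type { HttpStart } from 'src/core/public';

let http: HttpStart;
export const setHttp = (h: HttpStart) => (http = h);
export const getHttp = () => http;

// Consumers resolve the service when they need it, as in doFetch above:
async function doFetch(url: string, payload: object) {
  return await getHttp().fetch(url, payload);
}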
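To make the ML-side refactor concrete, here is a minimal sketch of the new import flow from the import_view.js hunk, assuming only the call shapes visible there (the importerFactory options, the synchronous read with a progress callback, and the expanded initializeImport signature); the component state updates and detailed error handling of the real file are elided:

import { getFileUpload } from '../../../../util/dependency_cache';

// The importer now comes from the fileUpload plugin's start contract
// instead of a local importerFactory module, and index-creation settings
// move from the factory options to initializeImport.
async function runImport({ format, results, data, index, settings, mappings, pipeline }) {
  const importer = await getFileUpload().importerFactory(format, {
    excludeLinesPattern: results.exclude_lines_pattern,
    multilineStartPattern: results.multiline_start_pattern,
  });
  if (importer === undefined) {
    return { success: false };
  }

  // Client-side parse and chunking of the file contents; the second
  // argument is a progress callback (setReadProgress in the real view).
  const readResp = importer.read(data, () => {});
  if (!readResp.success) {
    return { success: false };
  }

  // Creates the index (and optional ingest pipeline) before any docs are sent.
  const initializeImportResp = await importer.initializeImport(index, settings, mappings, pipeline);
  return { success: initializeImportResp.index !== undefined };
}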
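Finally, the wizard.tsx hunk replaces the loosely typed indexResponses object (previously handled behind a row of @ts-ignore comments) with a typed results payload. A sketch of that success-path contract, using only the fields visible in the hunk; the constant stand-ins and the optionality of docCount are assumptions for illustration:

import type { IndexPattern, IFieldType } from 'src/plugins/data/public';
import type { ImportResults } from '../../../../../file_upload/public';

// Stand-ins for constants the real wizard imports.
const GEO_FIELD_TYPES = ['geo_point', 'geo_shape'];
const DEFAULT_MAX_RESULT_WINDOW = 10000; // assumed value for illustration

// Shape now passed to _onIndexingComplete; failures go through the new
// separate _onIndexingError callback instead of being inferred here.
function toEsSearchSourceConfig(results: { indexDataResp: ImportResults; indexPattern: IndexPattern }) {
  // The geo field is read from the typed IndexPattern, not a raw response.
  const geoField = results.indexPattern.fields.find((field: IFieldType) =>
    GEO_FIELD_TYPES.includes(field.type)
  );
  if (!results.indexPattern.id || !geoField) {
    return null; // wizard flags INDEXING_STAGE.ERROR and clears the preview
  }
  return {
    indexPatternId: results.indexPattern.id,
    geoField: geoField.name,
    // Only turn on the bounds filter for large doc counts.
    filterByMapBounds: (results.indexDataResp.docCount ?? 0) > DEFAULT_MAX_RESULT_WINDOW,
  };
}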