From db5a839e4a07871c271691110e883374dd000b78 Mon Sep 17 00:00:00 2001
From: Elena Stoeva <59341489+ElenaStoeva@users.noreply.github.com>
Date: Tue, 23 Jul 2024 19:04:16 +0100
Subject: [PATCH] [Console Monaco] Resolve uncaught error from tokenizer
(#188746)
Fixes https://github.com/elastic/kibana/issues/186765
## Summary
This PR fixes the uncaught error from the tokenizer when we add the
input below:
Test input
```
GET /
GET _cluster/settings?include_defaults&flat_settings&filter_path=*.*interval*
PUT _cluster/settings
{
"persistent": {
"indices.lifecycle.poll_interval": "10s"
}
}
GET _cat/${TEST}
PUT _data_stream/test-cold
POST test-cold/_rollover
GET _cat/indices?v&h=
GET _cat/indices/.alerts-observability.threshold.alerts-default,.alerts-default.alerts-default,
GET _cat/indices,aliases
GET _cat/shards/test*?v
"""
index shard prirep state docs store dataset ip node
.ds-test-cold-2024.05.20-000002 0 p STARTED 0 249b 249b 172.19.0.2 839a1677bdab
.ds-test-cold-2024.05.20-000002 0 r UNASSIGNED
restored-.ds-test-cold-2024.05.20-000001 0 p STARTED 0 289b 289b 172.19.0.2 839a1677bdab
"""
GET restored-.ds-test-cold-2024.05.20-000001/_settings
"""
"snapshot": {
"snapshot_name": "2024.05.20-.ds-test-cold-2024.05.20-000001-test-h5yr-_tws6es6biug0uqbw",
"index_uuid": "8tXgFf_cR2KIMofpj2I7Uw",
"repository_uuid": "3i3oPavIQZGeYOQzRZajyQ",
"index_name": ".ds-test-cold-2024.05.20-000001",
"repository_name": "fs",
"snapshot_uuid": "3S3CDGZgSh2XQSRO3UTxVA"
}
"""
DELETE _snapshot/fs/2024.05.20-.ds-test-cold-2024.05.20-000001-test-h5yr-_tws6es6biug0uqbw
POST _ilm/stop
POST restored-.ds-test-cold-2024.05.20-000001/_clone/.ds-test-cold-2024.05.20-000001
{
"settings": {
"index.store.type": null,
"index.recovery.type": null
}
}
# Check the cloned index & shards
GET _cat/indices/.ds-test-cold-2024.05.20-000001?v
GET _cat/shards/.ds-test-cold-2024.05.20-000001?v
PUT _snapshot/fs/2024.05.20-.ds-test-cold-2024.05.20-000001
{
"indices": ".ds-test-cold-2024.05.20-000001",
"ignore_unavailable": false,
"include_global_state": false
}
# Check the taken snapshot
GET _snapshot/fs/2024.05.20-.ds-test-cold-2024.05.20-000001
"""
"snapshot": "2024.05.20-.ds-test-cold-2024.05.20-000001",
"uuid": "GC7xoG1yQN6W5LIUMqCDYA",
"""
PUT _snapshot/fs/temp-snapshot-for-recovery
{
"indices": "restored-.ds-test-cold-2024.05.20-000001",
"ignore_unavailable": false,
"include_global_state": false
}
GET _snapshot/fs/temp-snapshot-for-recovery
"""
"snapshot": "temp-snapshot-for-recovery",
"uuid": "NgFDtQMESv2o4BZd0dsP8g",
"""
GET .ds-test-cold-2024.05.20-000002
"""
{
".ds-test-cold-2024.05.20-000002": {
"aliases": {},
"mappings": {
"_data_stream_timestamp": {
"enabled": true
},
"properties": {
"@timestamp": {
"type": "date"
}
}
},
"settings": {
"index": {
"lifecycle": {
"name": "test"
},
"routing": {
"allocation": {
"include": {
"_tier_preference": "data_hot"
}
}
},
"hidden": "true",
"number_of_shards": "1",
"provided_name": ".ds-test-cold-2024.05.20-000002",
"creation_date": "1716175738940",
"priority": "100",
"number_of_replicas": "1",
"uuid": "nvldBwt2QxeQzpkrbK69qQ",
"version": {
"created": "8503000"
}
}
},
"data_stream": "test-cold"
}
}
"""
```
**How to test:** Add the input above in the Console editor and verify that
there is no uncaught error in the browser console.
This change also resolves some highlighting inconsistencies, where some
text was incorrectly highlighted as `method` and `url` tokens:
Before:
Now:
---
packages/kbn-monaco/src/console/lexer_rules/console_editor.ts | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/packages/kbn-monaco/src/console/lexer_rules/console_editor.ts b/packages/kbn-monaco/src/console/lexer_rules/console_editor.ts
index 97984a83b2c9f3..164a44eb9f5fbd 100644
--- a/packages/kbn-monaco/src/console/lexer_rules/console_editor.ts
+++ b/packages/kbn-monaco/src/console/lexer_rules/console_editor.ts
@@ -21,12 +21,13 @@ export const languageConfiguration: monaco.languages.LanguageConfiguration = {
export const lexerRules: monaco.languages.IMonarchLanguage = {
...consoleSharedLexerRules,
+ ignoreCase: true,
tokenizer: {
...consoleSharedLexerRules.tokenizer,
root: [
...consoleSharedLexerRules.tokenizer.root,
// method
- matchTokensWithEOL('method', /([a-zA-Z]+)/, 'root', 'method_sep'),
+ matchTokensWithEOL('method', /get|post|put|patch|delete|head/, 'root', 'method_sep'),
// whitespace
matchToken('whitespace', '\\s+'),
// text