diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
index 799c1baedbf1c..f2a958f6fcf3a 100755
--- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -111,6 +111,7 @@ public enum MergeReason {
     private volatile FieldTypeLookup fieldTypes;
     private volatile Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>();
     private boolean hasNested = false; // updated dynamically to true when a nested object is added
+    private boolean allEnabled = false; // updated dynamically to true when _all is enabled
 
     private final DocumentMapperParser documentParser;
 
@@ -150,6 +151,13 @@ public boolean hasNested() {
         return this.hasNested;
     }
 
+    /**
+     * Returns true if the "_all" field is enabled for the type
+     */
+    public boolean allEnabled() {
+        return this.allEnabled;
+    }
+
     /**
      * returns an immutable iterator over current document mappers.
      *
@@ -368,6 +376,7 @@ private synchronized DocumentMapper merge(DocumentMapper mapper, MergeReason rea
         this.hasNested = hasNested;
         this.fullPathObjectMappers = fullPathObjectMappers;
         this.parentTypes = parentTypes;
+        this.allEnabled = mapper.allFieldMapper().enabled();
 
         assert assertSerialization(newMapper);
         assert assertMappersShareSameFieldType();
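The new `allEnabled()` accessor is what lets the query layer decide when to fall back to automatic field expansion instead of relying on `_all`. A minimal sketch of that decision, simplified from the condition used in `QueryStringQueryBuilder#doToQuery` further down (the real check also looks at the builder-level `defaultField`); the parameter names here are illustrative only:

```java
import java.util.Map;
import org.elasticsearch.index.mapper.MapperService;

// Sketch only: simplified version of the fallback condition introduced later in this PR.
final class AllFieldsDecision {
    static boolean shouldExpandToAllFields(MapperService mapperService, String contextDefaultField,
                                           Boolean useAllFields, Map<String, Float> fieldsAndWeights) {
        boolean explicitlyRequested = useAllFields != null && useAllFields;
        boolean allDisabledAndNothingSpecified = mapperService.allEnabled() == false // _all disabled in the mapping
                && "_all".equals(contextDefaultField)                                // index default_field untouched
                && fieldsAndWeights.isEmpty();                                       // no fields in the request
        return explicitlyRequested || allDisabledAndNothingSpecified;
    }
}
```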
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
index 4ba49e5f0e9ec..7c9d958e2eb74 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
@@ -194,6 +194,10 @@ public void setIsFilter(boolean isFilter) {
         this.isFilter = isFilter;
     }
 
+    /**
+     * Returns all the fields that match a given pattern. If prefixed with a
+     * type then the fields will be returned with a type prefix.
+     */
     public Collection<String> simpleMatchToIndexNames(String pattern) {
         return mapperService.simpleMatchToIndexNames(pattern);
     }
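`simpleMatchToIndexNames` is the hook the query builder uses both to resolve wildcard entries in `fields` and, with the `"*"` pattern, to enumerate every mapped field for the automatic expansion. A small sketch of the wildcard resolution, mirroring the loop in `QueryStringQueryBuilder#doToQuery` below (class and method names here are illustrative):

```java
import java.util.Map;
import java.util.TreeMap;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.query.QueryShardContext;

// Sketch only: expand a possibly-wildcarded field entry into concrete field names.
final class FieldPatternResolver {
    static Map<String, Float> resolve(QueryShardContext context, String fieldOrPattern, float weight) {
        Map<String, Float> resolved = new TreeMap<>();
        if (Regex.isSimpleMatchPattern(fieldOrPattern)) {
            // "f*" style pattern: ask the shard context for every matching mapped field
            for (String fieldName : context.simpleMatchToIndexNames(fieldOrPattern)) {
                resolved.put(fieldName, weight);
            }
        } else {
            // plain field name: use it as-is
            resolved.put(fieldOrPattern, weight);
        }
        return resolved;
    }
}
```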
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
index 2867169ecbebf..eb508b9d4a4b2 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
@@ -37,17 +37,30 @@
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.index.mapper.IpFieldMapper;
+import org.elasticsearch.index.mapper.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.NumberFieldMapper;
+import org.elasticsearch.index.mapper.ScaledFloatFieldMapper;
+import org.elasticsearch.index.mapper.StringFieldMapper;
+import org.elasticsearch.index.mapper.TextFieldMapper;
+import org.elasticsearch.index.mapper.TimestampFieldMapper;
 import org.elasticsearch.index.query.support.QueryParsers;
 import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
+import java.util.Set;
 import java.util.TreeMap;
 
 /**
@@ -103,6 +116,24 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQueryBuilder> {
+    private static final Set<String> ALLOWED_QUERY_MAPPER_TYPES;
+
+    static {
+        ALLOWED_QUERY_MAPPER_TYPES = new HashSet<>();
+        ALLOWED_QUERY_MAPPER_TYPES.add(DateFieldMapper.CONTENT_TYPE);
+        ALLOWED_QUERY_MAPPER_TYPES.add(IpFieldMapper.CONTENT_TYPE);
+        ALLOWED_QUERY_MAPPER_TYPES.add(KeywordFieldMapper.CONTENT_TYPE);
+        for (NumberFieldMapper.NumberType nt : NumberFieldMapper.NumberType.values()) {
+            ALLOWED_QUERY_MAPPER_TYPES.add(nt.typeName());
+        }
+        ALLOWED_QUERY_MAPPER_TYPES.add(ScaledFloatFieldMapper.CONTENT_TYPE);
+        ALLOWED_QUERY_MAPPER_TYPES.add(StringFieldMapper.CONTENT_TYPE);
+        ALLOWED_QUERY_MAPPER_TYPES.add(TextFieldMapper.CONTENT_TYPE);
+        ALLOWED_QUERY_MAPPER_TYPES.add(TimestampFieldMapper.CONTENT_TYPE);
+    }
 
     private final String queryString;
 
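Only the mapper types in this whitelist take part in the automatic expansion; fields such as `binary`, `completion`, `geo_point` or `geo_shape` (all present in the test index added below) are left out because they cannot back a plain term query. A tiny sketch of the per-field check that `allQueryableDefaultFields` applies further down, with the allowed-types set passed in as an assumption:

```java
import java.util.Set;
import org.elasticsearch.index.mapper.MappedFieldType;

// Sketch only: the per-field filter used when expanding "all fields".
final class QueryableFieldFilter {
    static boolean isQueryable(MappedFieldType fieldType, Set<String> allowedTypes) {
        return fieldType != null && allowedTypes.contains(fieldType.typeName());
    }
}
```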
@@ -156,6 +187,8 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQueryBuilder> {
         String fuzzyRewrite = null;
         String rewrite = null;
         boolean splitOnWhitespace = DEFAULT_SPLIT_ON_WHITESPACE;
+        Boolean useAllFields = null;
         Map<String, Float> fieldsAndWeights = new HashMap<>();
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
@@ -747,6 +800,8 @@ public static Optional<QueryStringQueryBuilder> fromXContent(QueryParseContext p
                     lenient = parser.booleanValue();
                 } else if (parseContext.getParseFieldMatcher().match(currentFieldName, LOCALE_FIELD)) {
                     // ignore, deprecated setting
+                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, ALL_FIELDS_FIELD)) {
+                    useAllFields = parser.booleanValue();
                 } else if (parseContext.getParseFieldMatcher().match(currentFieldName, TIME_ZONE_FIELD)) {
                     try {
                         timeZone = parser.text();
@@ -771,6 +826,12 @@ public static Optional<QueryStringQueryBuilder> fromXContent(QueryParseContext p
             throw new ParsingException(parser.getTokenLocation(), "[" +
                     QueryStringQueryBuilder.NAME + "] must be provided with a [query]");
         }
 
+        if ((useAllFields != null && useAllFields) &&
+                (defaultField != null || fieldsAndWeights.size() != 0)) {
+            throw new ParsingException(parser.getTokenLocation(),
+                    "cannot use [all_fields] parameter in conjunction with [default_field] or [fields]");
+        }
+
         QueryStringQueryBuilder queryStringQuery = new QueryStringQueryBuilder(queryString);
         queryStringQuery.fields(fieldsAndWeights);
         queryStringQuery.defaultField(defaultField);
@@ -798,6 +859,7 @@ public static Optional<QueryStringQueryBuilder> fromXContent(QueryParseContext p
         queryStringQuery.boost(boost);
         queryStringQuery.queryName(queryName);
         queryStringQuery.splitOnWhitespace(splitOnWhitespace);
+        queryStringQuery.useAllFields(useAllFields);
         return Optional.of(queryStringQuery);
     }
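For contrast with the rejected combinations (exercised in `QueryStringQueryBuilderTests` below), a request that passes the new validation simply omits `fields` and `default_field`. A sketch in the same Java-string style the tests use:

```java
// Sketch only: an all_fields request the parser accepts, since neither
// "fields" nor "default_field" is present alongside it.
final class ValidAllFieldsRequest {
    static final String JSON =
            "{\n" +
            "  \"query_string\" : {\n" +
            "    \"query\" : \"this AND that OR thus\",\n" +
            "    \"all_fields\" : true\n" +
            "  }\n" +
            "}";
}
```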
@@ -833,7 +895,8 @@ protected boolean doEquals(QueryStringQueryBuilder other) {
                 Objects.equals(timeZone.getID(), other.timeZone.getID()) &&
                 Objects.equals(escape, other.escape) &&
                 Objects.equals(maxDeterminizedStates, other.maxDeterminizedStates) &&
-                Objects.equals(splitOnWhitespace, other.splitOnWhitespace);
+                Objects.equals(splitOnWhitespace, other.splitOnWhitespace) &&
+                Objects.equals(useAllFields, other.useAllFields);
     }
 
     @Override
@@ -842,7 +905,29 @@ protected int doHashCode() {
                 quoteFieldSuffix, autoGeneratePhraseQueries, allowLeadingWildcard, analyzeWildcard,
                 enablePositionIncrements, fuzziness, fuzzyPrefixLength, fuzzyMaxExpansions, fuzzyRewrite,
                 phraseSlop, useDisMax, tieBreaker, rewrite, minimumShouldMatch, lenient,
-                timeZone == null ? 0 : timeZone.getID(), escape, maxDeterminizedStates, splitOnWhitespace);
+                timeZone == null ? 0 : timeZone.getID(), escape, maxDeterminizedStates, splitOnWhitespace, useAllFields);
+    }
+
+    private Map<String, Float> allQueryableDefaultFields(QueryShardContext context) {
+        Collection<String> allFields = context.simpleMatchToIndexNames("*");
+        Map<String, Float> fields = new HashMap<>();
+        for (String fieldName : allFields) {
+            if (MapperService.isMetadataField(fieldName)) {
+                // Ignore our metadata fields
+                continue;
+            }
+            MappedFieldType mft = context.fieldMapper(fieldName);
+            assert mft != null : "should never have a null mapper for an existing field";
+
+            // Ignore fields that are not in the allowed mapper types. Some
+            // types do not support term queries, and thus we cannot generate
+            // a special query for them.
+            String mappingType = mft.typeName();
+            if (ALLOWED_QUERY_MAPPER_TYPES.contains(mappingType)) {
+                fields.put(fieldName, 1.0f);
+            }
+        }
+        return fields;
     }
 
     @Override
@@ -855,18 +940,39 @@ protected Query doToQuery(QueryShardContext context) throws IOException {
         } else {
             qpSettings = new QueryParserSettings(this.queryString);
         }
-        qpSettings.defaultField(this.defaultField == null ? context.defaultField() : this.defaultField);
+
         Map<String, Float> resolvedFields = new TreeMap<>();
-        for (Map.Entry<String, Float> fieldsEntry : fieldsAndWeights.entrySet()) {
-            String fieldName = fieldsEntry.getKey();
-            Float weight = fieldsEntry.getValue();
-            if (Regex.isSimpleMatchPattern(fieldName)) {
-                for (String resolvedFieldName : context.getMapperService().simpleMatchToIndexNames(fieldName)) {
-                    resolvedFields.put(resolvedFieldName, weight);
+
+        // If explicitly required to use all fields, use all fields, OR:
+        // Automatically determine the fields (to replace the _all field) if all of the following are true:
+        // - The _all field is disabled,
+        // - and the default_field has not been changed in the settings
+        // - and default_field is not specified in the request
+        // - and no fields are specified in the request
+        if ((this.useAllFields != null && this.useAllFields) ||
+                (context.getMapperService().allEnabled() == false &&
+                        "_all".equals(context.defaultField()) &&
+                        this.defaultField == null &&
+                        this.fieldsAndWeights.size() == 0)) {
+            // Use the automatically determined expansion of all queryable fields
+            resolvedFields = allQueryableDefaultFields(context);
+            // Automatically set leniency to "true" so mismatched fields don't cause exceptions
+            qpSettings.lenient(true);
+        } else {
+            qpSettings.defaultField(this.defaultField == null ? context.defaultField() : this.defaultField);
+
+            for (Map.Entry<String, Float> fieldsEntry : fieldsAndWeights.entrySet()) {
+                String fieldName = fieldsEntry.getKey();
+                Float weight = fieldsEntry.getValue();
+                if (Regex.isSimpleMatchPattern(fieldName)) {
+                    for (String resolvedFieldName : context.getMapperService().simpleMatchToIndexNames(fieldName)) {
+                        resolvedFields.put(resolvedFieldName, weight);
+                    }
+                } else {
+                    resolvedFields.put(fieldName, weight);
                 }
-            } else {
-                resolvedFields.put(fieldName, weight);
             }
+            qpSettings.lenient(lenient == null ? context.queryStringLenient() : lenient);
         }
         qpSettings.fieldsAndWeights(resolvedFields);
         qpSettings.defaultOperator(defaultOperator.toQueryParserOperator());
@@ -905,7 +1011,6 @@ protected Query doToQuery(QueryShardContext context) throws IOException {
         qpSettings.useDisMax(useDisMax);
         qpSettings.tieBreaker(tieBreaker);
         qpSettings.rewriteMethod(QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), this.rewrite));
-        qpSettings.lenient(lenient == null ? context.queryStringLenient() : lenient);
         qpSettings.timeZone(timeZone);
         qpSettings.maxDeterminizedStates(maxDeterminizedStates);
         qpSettings.splitOnWhitespace(splitOnWhitespace);
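Callers of the Java API can request the same expansion explicitly. A usage sketch, mirroring `QueryStringIT#testExplicitAllFieldsRequested` further down (the field name `f_ip` comes from the test index added in this PR):

```java
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryStringQueryBuilder;

// Sketch only: explicit opt-in to the all-fields expansion through the builder.
final class ExplicitAllFieldsExample {
    static QueryStringQueryBuilder build() {
        return QueryBuilders.queryStringQuery("127.0.0.2 \"2015/09/02\"")
                .field("f_ip")        // on its own this would limit the query to f_ip...
                .useAllFields(true);  // ...but the explicit flag expands to every queryable field
    }
}
```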
\"this AND that OR thus\",\n" + + " \"fields\" : [\"foo\"],\n" + + " \"all_fields\" : true\n" + + " }\n" + + "}"; + + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json)); + assertThat(e.getMessage(), + containsString("cannot use [all_fields] parameter in conjunction with [default_field] or [fields]")); + + String json2 = + "{\n" + + " \"query_string\" : {\n" + + " \"query\" : \"this AND that OR thus\",\n" + + " \"default_field\" : \"foo\",\n" + + " \"all_fields\" : true\n" + + " }\n" + + "}"; + + e = expectThrows(ParsingException.class, () -> parseQuery(json2)); + assertThat(e.getMessage(), + containsString("cannot use [all_fields] parameter in conjunction with [default_field] or [fields]")); + } } diff --git a/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java new file mode 100644 index 0000000000000..b1b0da73c357d --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java @@ -0,0 +1,254 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
index d89cb6702e04c..893c2ec4e5613 100644
--- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
@@ -40,6 +40,7 @@
 import org.apache.lucene.search.WildcardQuery;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.search.internal.SearchContext;
@@ -735,5 +736,32 @@ public void testExpandedTerms() throws Exception {
                 .toQuery(createShardContext());
         assertEquals(new TermRangeQuery(STRING_FIELD_NAME, new BytesRef("abc"), new BytesRef("bcd"), true, true), query);
     }
-
+
+    public void testAllFieldsWithFields() throws IOException {
+        String json =
+                "{\n" +
+                "  \"query_string\" : {\n" +
+                "    \"query\" : \"this AND that OR thus\",\n" +
+                "    \"fields\" : [\"foo\"],\n" +
+                "    \"all_fields\" : true\n" +
+                "  }\n" +
+                "}";
+
+        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
+        assertThat(e.getMessage(),
+                containsString("cannot use [all_fields] parameter in conjunction with [default_field] or [fields]"));
+
+        String json2 =
+                "{\n" +
+                "  \"query_string\" : {\n" +
+                "    \"query\" : \"this AND that OR thus\",\n" +
+                "    \"default_field\" : \"foo\",\n" +
+                "    \"all_fields\" : true\n" +
+                "  }\n" +
+                "}";
+
+        e = expectThrows(ParsingException.class, () -> parseQuery(json2));
+        assertThat(e.getMessage(),
+                containsString("cannot use [all_fields] parameter in conjunction with [default_field] or [fields]"));
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java
new file mode 100644
index 0000000000000..b1b0da73c357d
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.query;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.index.query.QueryStringQueryBuilder;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.test.ESIntegTestCase;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
+import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.lessThan;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+
+public class QueryStringIT extends ESIntegTestCase {
+
+    @Before
+    public void setup() throws Exception {
+        String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json");
+        prepareCreate("test").setSource(indexBody).get();
+        ensureGreen("test");
+    }
+
+    private QueryStringQueryBuilder lenientQuery(String queryText) {
+        return queryStringQuery(queryText).lenient(true);
+    }
+
+    public void testBasicAllQuery() throws Exception {
+        List<IndexRequestBuilder> reqs = new ArrayList<>();
+        reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo bar baz"));
+        reqs.add(client().prepareIndex("test", "doc", "2").setSource("f2", "Bar"));
+        reqs.add(client().prepareIndex("test", "doc", "3").setSource("f3", "foo bar baz"));
+        indexRandom(true, false, reqs);
+
+        SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get();
+        assertHitCount(resp, 2L);
+        assertHits(resp.getHits(), "1", "3");
+
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("bar")).get();
+        assertHitCount(resp, 2L);
+        assertHits(resp.getHits(), "1", "3");
+
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("Bar")).get();
+        assertHitCount(resp, 3L);
+        assertHits(resp.getHits(), "1", "2", "3");
+
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("foa")).get();
+        assertHitCount(resp, 1L);
+        assertHits(resp.getHits(), "3");
+    }
+
+    public void testWithDate() throws Exception {
+        List<IndexRequestBuilder> reqs = new ArrayList<>();
+        reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo", "f_date", "2015/09/02"));
+        reqs.add(client().prepareIndex("test", "doc", "2").setSource("f1", "bar", "f_date", "2015/09/01"));
+        indexRandom(true, false, reqs);
+
+        SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo bar")).get();
+        assertHits(resp.getHits(), "1", "2");
+        assertHitCount(resp, 2L);
+
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")).get();
+        assertHits(resp.getHits(), "1");
+        assertHitCount(resp, 1L);
+
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("bar \"2015/09/02\"")).get();
+        assertHits(resp.getHits(), "1", "2");
+        assertHitCount(resp, 2L);
+
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\" \"2015/09/01\"")).get();
+        assertHits(resp.getHits(), "1", "2");
+        assertHitCount(resp, 2L);
+    }
+
+    public void testWithLotsOfTypes() throws Exception {
+        List<IndexRequestBuilder> reqs = new ArrayList<>();
+        reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo",
+                "f_date", "2015/09/02",
+                "f_float", "1.7",
+                "f_ip", "127.0.0.1"));
+        reqs.add(client().prepareIndex("test", "doc", "2").setSource("f1", "bar",
+                "f_date", "2015/09/01",
+                "f_float", "1.8",
+                "f_ip", "127.0.0.2"));
+        indexRandom(true, false, reqs);
+
+        SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo bar")).get();
+        assertHits(resp.getHits(), "1", "2");
+        assertHitCount(resp, 2L);
+
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")).get();
+        assertHits(resp.getHits(), "1");
+        assertHitCount(resp, 1L);
+
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("127.0.0.2 \"2015/09/02\"")).get();
+        assertHits(resp.getHits(), "1", "2");
+        assertHitCount(resp, 2L);
+
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("127.0.0.1 1.8")).get();
+        assertHits(resp.getHits(), "1", "2");
+        assertHitCount(resp, 2L);
+    }
+
+    public void testDocWithAllTypes() throws Exception {
+        List<IndexRequestBuilder> reqs = new ArrayList<>();
+        String docBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-example-document.json");
+        reqs.add(client().prepareIndex("test", "doc", "1").setSource(docBody));
+        indexRandom(true, false, reqs);
+
+        SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("Bar")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("Baz")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("sbaz")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("19")).get();
+        assertHits(resp.getHits(), "1");
+        // nested doesn't match because it's hidden
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("1476383971")).get();
+        assertHits(resp.getHits(), "1");
+        // bool doesn't match
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("7")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("23")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("1293")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("42")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("1.7")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("1.5")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("12.23")).get();
+        assertHits(resp.getHits(), "1");
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("127.0.0.1")).get();
+        assertHits(resp.getHits(), "1");
+        // binary doesn't match
+        // suggest doesn't match
+        // geo_point doesn't match
+        // geo_shape doesn't match
+    }
+
+    public void testKeywordWithWhitespace() throws Exception {
+        List<IndexRequestBuilder> reqs = new ArrayList<>();
+        reqs.add(client().prepareIndex("test", "doc", "1").setSource("f2", "Foo Bar"));
+        reqs.add(client().prepareIndex("test", "doc", "2").setSource("f1", "bar"));
+        reqs.add(client().prepareIndex("test", "doc", "3").setSource("f1", "foo bar"));
+        indexRandom(true, false, reqs);
+
+        SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get();
+        assertHits(resp.getHits(), "3");
+        assertHitCount(resp, 1L);
+
+        resp = client().prepareSearch("test").setQuery(queryStringQuery("bar")).get();
+        assertHits(resp.getHits(), "2", "3");
+        assertHitCount(resp, 2L);
+
+        // Will be fixed once https://github.com/elastic/elasticsearch/pull/20965 is in
+        // resp = client().prepareSearch("test")
+        //     .setQuery(queryStringQuery("Foo Bar").splitOnWhitespcae(false))
+        //     .get();
+        // assertHits(resp.getHits(), "1", "2", "3");
+        // assertHitCount(resp, 3L);
+    }
+
+    public void testExplicitAllFieldsRequested() throws Exception {
+        List<IndexRequestBuilder> reqs = new ArrayList<>();
+        reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo",
+                "f_date", "2015/09/02",
+                "f_float", "1.7",
+                "f_ip", "127.0.0.1"));
+        reqs.add(client().prepareIndex("test", "doc", "2").setSource("f1", "bar",
+                "f_date", "2015/09/01",
+                "f_float", "1.8",
+                "f_ip", "127.0.0.2"));
+        indexRandom(true, false, reqs);
+
+        SearchResponse resp = client().prepareSearch("test").setQuery(
+                queryStringQuery("127.0.0.2 \"2015/09/02\"")
+                        .field("f_ip") // Usually this would mean we wouldn't search "all" fields
+                        .useAllFields(true)) // ... unless explicitly requested
+                .get();
+        assertHits(resp.getHits(), "1", "2");
+        assertHitCount(resp, 2L);
+    }
+
+    @LuceneTestCase.AwaitsFix(bugUrl="currently can't perform phrase queries on fields that don't support positions")
+    public void testPhraseQueryOnFieldWithNoPositions() throws Exception {
+        List<IndexRequestBuilder> reqs = new ArrayList<>();
+        reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo bar", "f4", "eggplant parmesan"));
+        reqs.add(client().prepareIndex("test", "doc", "2").setSource("f1", "foo bar", "f4", "chicken parmesan"));
+        indexRandom(true, false, reqs);
+
+        SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("\"eggplant parmesan\"")).get();
+        assertHits(resp.getHits(), "1");
+        assertHitCount(resp, 1L);
+    }
+
+    private void assertHits(SearchHits hits, String... ids) {
+        assertThat(hits.totalHits(), equalTo((long) ids.length));
+        Set<String> hitIds = new HashSet<>();
+        for (SearchHit hit : hits.getHits()) {
+            hitIds.add(hit.id());
+        }
+        assertThat(hitIds, containsInAnyOrder(ids));
+    }
+
+}
diff --git a/core/src/test/resources/org/elasticsearch/search/query/all-example-document.json b/core/src/test/resources/org/elasticsearch/search/query/all-example-document.json
new file mode 100644
index 0000000000000..9e4d04930a71a
--- /dev/null
+++ b/core/src/test/resources/org/elasticsearch/search/query/all-example-document.json
@@ -0,0 +1,36 @@
+{
+  "f1": "foo",
+  "f2": "Bar",
+  "f3": "foo bar baz",
+  "f_multi": "Foo Bar Baz",
+  "f_object": {
+    "sub1": "sfoo",
+    "sub2": "sbar",
+    "sub3": 19
+  },
+  "f_nested": {
+    "nest1": "nfoo",
+    "nest2": "nbar",
+    "nest3": 21
+  },
+  "f_date": "1476383971",
+  "f_bool": "true",
+  "f_byte": "7",
+  "f_short": "23",
+  "f_int": "1293",
+  "f_long": "42",
+  "f_float": "1.7",
+  "f_hfloat": "1.5",
+  "f_sfloat": "12.23",
+  "f_ip": "127.0.0.1",
+  "f_binary": "VGhpcyBpcyBzb21lIGJpbmFyeSBkYXRhCg==",
+  "f_suggest": {
+    "input": ["Nevermind", "Nirvana"],
+    "weight": 34
+  },
+  "f_geop": "41.12,-71.34",
+  "f_geos": {
+    "type": "point",
+    "coordinates": [-77.03653, 38.897676]
+  }
+}
diff --git a/core/src/test/resources/org/elasticsearch/search/query/all-query-index.json b/core/src/test/resources/org/elasticsearch/search/query/all-query-index.json
new file mode 100644
index 0000000000000..1a67abcfcb38f
--- /dev/null
+++ b/core/src/test/resources/org/elasticsearch/search/query/all-query-index.json
@@ -0,0 +1,80 @@
+{
+  "settings": {
+    "index": {
+      "number_of_shards": 1,
+      "number_of_replicas": 0,
+      "analysis": {
+        "analyzer": {
+          "my_ngrams": {
+            "type": "custom",
+            "tokenizer": "standard",
+            "filter": ["my_ngrams"]
+          }
+        },
+        "filter": {
+          "my_ngrams": {
+            "type": "ngram",
+            "min_gram": 2,
+            "max_gram": 2
+          }
+        }
+      }
+    }
+  },
+  "mappings": {
+    "doc": {
+      "_all": {
+        "enabled": false
+      },
+      "properties": {
+        "f1": {"type": "text"},
+        "f2": {"type": "keyword"},
+        "f3": {"type": "text", "analyzer": "my_ngrams"},
+        "f4": {
+          "type": "text",
+          "index_options": "docs"
+        },
+        "f_multi": {
+          "type": "text",
+          "fields": {
+            "raw": {"type": "keyword"},
+            "f_token_count": {"type": "token_count", "analyzer": "standard"}
+          }
+        },
+        "f_object": {
+          "type": "object",
+          "properties": {
+            "sub1": {"type": "text"},
+            "sub2": {"type": "keyword"},
+            "sub3": {"type": "integer"}
+          }
+        },
+        "f_nested": {
+          "type": "nested",
+          "properties": {
+            "nest1": {"type": "text"},
+            "nest2": {"type": "keyword"},
+            "nest3": {"type": "integer"}
+          }
+        },
+        "f_date": {
+          "type": "date",
+          "format": "yyyy/MM/dd||epoch_millis"
+        },
+        "f_bool": {"type": "boolean"},
+        "f_byte": {"type": "byte"},
+        "f_short": {"type": "short"},
+        "f_int": {"type": "integer"},
+        "f_long": {"type": "long"},
+        "f_float": {"type": "float"},
+        "f_hfloat": {"type": "half_float"},
+        "f_sfloat": {"type": "scaled_float", "scaling_factor": 100},
+        "f_ip": {"type": "ip"},
+        "f_binary": {"type": "binary"},
+        "f_suggest": {"type": "completion"},
+        "f_geop": {"type": "geo_point"},
+        "f_geos": {"type": "geo_shape"}
+      }
+    }
+  }
+}
diff --git a/docs/reference/query-dsl/query-string-query.asciidoc b/docs/reference/query-dsl/query-string-query.asciidoc
index 28d8fcffbe262..70f976d1d56c8 100644
--- a/docs/reference/query-dsl/query-string-query.asciidoc
+++ b/docs/reference/query-dsl/query-string-query.asciidoc
@@ -88,6 +88,11 @@ comprehensive example.
 Instead the queryparser would parse around only real 'operators'.
 Default to `false`.
 
+|`all_fields` | Perform the query on all fields detected in the mapping that can
+be queried. Will be used by default when the `_all` field is disabled and no
+`default_field` is specified (either in the index settings or in the request
+body) and no `fields` are specified.
+
 |=======================================================================
 
 When a multi term query is being generated, one can control how it gets
@@ -102,8 +107,9 @@ When not explicitly specifying the field to search on in the query
 string syntax, the `index.query.default_field` will be used to derive
 which field to search on. It defaults to `_all` field.
 
-So, if `_all` field is disabled, it might make sense to change it to set
-a different default field.
+If the `_all` field is disabled, the `query_string` query will automatically
+attempt to determine the existing fields in the index's mapping that are
+queryable, and perform the search on those fields.
 
 [float]
 ==== Multi Field
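As an end-to-end illustration of the documented behaviour, a sketch only, following the integration tests above (the "test" index has `_all` disabled and the request names no `fields` or `default_field`), a bare `query_string` search is spread across the queryable fields automatically:

```java
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;

// Sketch only: against the "test" index created in QueryStringIT (whose mapping
// disables _all), a plain query_string search hits f1, f2, f3 and the other
// queryable fields without naming any of them explicitly.
final class AllFieldsSearchExample {
    static SearchResponse search(Client client) {
        return client.prepareSearch("test")
                .setQuery(QueryBuilders.queryStringQuery("foo bar"))
                .get();
    }
}
```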